Yek Yi Wei
P2107631
DAAA/FT/2B/03
import warnings
warnings.filterwarnings("ignore")
# Import Libraries
import numpy as np
import pandas as pd
import random
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf
from tensorflow import keras
# import keras_tuner
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras import Sequential, Input
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Flatten, Activation, Softmax, Dense, Conv2D, MaxPooling2D, GlobalAveragePooling2D, Dropout, Rescaling, BatchNormalization, SpatialDropout2D, ZeroPadding2D, Add, add
from tensorflow.keras.callbacks import History, EarlyStopping
from sklearn.metrics import confusion_matrix, classification_report
from tensorflow.keras.optimizers import Adam
2022-11-25 12:49:04.069364: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
CIFAR100 images are originally 32x32. For modern CNN architectures (such as ResNet18) we have to upscale these images to a proper size (256x256). Doing this upscaling during data loading can cause a CPU bottleneck. So here I have preprocessed the images to 256x256 so that you can directly load them.
# Confirm that TensorFlow can see a GPU before training.
import tensorflow as tf

gpu_devices = tf.config.list_physical_devices('GPU')
for gpu in gpu_devices:
    print(gpu)
PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')
2022-11-25 12:49:09.601484: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:49:09.605023: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:49:09.605203: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
# Load CIFAR-100 with fine-grained labels (100 classes).  The original
# code called load_data() twice (once with label_mode="fine" and once
# discarding the keyword); label_mode defaults to "fine", so a single
# call is sufficient and avoids loading the dataset twice.
(X_train, y_train), (X_test, y_test) = tf.keras.datasets.cifar100.load_data(label_mode="fine")
print(X_train.shape)
print(y_train.shape)
print(X_test.shape)
print(y_test.shape)
(50000, 32, 32, 3) (50000, 1) (10000, 32, 32, 3) (10000, 1)
# Flatten the label arrays from shape (N, 1) to (N,).  ravel() is the
# idiomatic way to drop the singleton axis; the original used
# np.concatenate, which treats the 2D array as N one-element rows and
# concatenates them — same result, but needlessly indirect.
y_train, y_test = y_train.ravel(), y_test.ravel()
print(X_train.shape)
print(y_train.shape)
print(X_test.shape)
print(y_test.shape)
(50000, 32, 32, 3) (50000,) (10000, 32, 32, 3) (10000,)
# Quick sanity check: display the first training image.
plt.imshow(X_train[0], cmap=plt.cm.binary)
plt.show()
# Tabulate the label distribution.  NOTE: this summarizes y_test — the
# CIFAR-100 TEST split, which has exactly 100 images per class — so the
# printed heading now says "Test" (the original mislabeled it "Train").
import pandas as pd
data = pd.DataFrame(y_test.tolist(), columns=["Class"])
print('Test Set Class Distribution:\n')
print(data["Class"].value_counts().sort_index())
Train Set Class Distribution:
0 100
1 100
2 100
3 100
4 100
...
95 100
96 100
97 100
98 100
99 100
Name: Class, Length: 100, dtype: int64
Distribution of all the classes
# Bar chart of the per-class image counts.
plt.figure(figsize=(10, 5))
class_counts = data["Class"].value_counts()
class_counts.plot(kind='bar', title='Distribution of classes')
<AxesSubplot: title={'center': 'Distribution of classes'}>
def ind(array, item):
    """Return the index tuple of the first occurrence of `item` in `array`.

    Returns None when `item` is not present (same contract as the original
    ndenumerate scan).  Uses vectorized np.argwhere instead of a
    Python-level element-by-element loop; argwhere lists matches in
    C (row-major) order, which matches ndenumerate's iteration order.
    """
    matches = np.argwhere(array == item)
    if matches.size == 0:
        return None
    return tuple(matches[0])
## Index of the first training image belonging to each of the 100 classes.
class_index = [ind(y_train, label)[0] for label in np.unique(y_train)]
print(class_index)
[2, 4, 202, 342, 102, 285, 55, 178, 16, 53, 244, 3, 179, 240, 51, 72, 573, 13, 75, 0, 57, 34, 35, 8, 38, 241, 150, 68, 7, 1, 133, 9, 121, 67, 120, 65, 60, 212, 83, 10, 144, 211, 28, 63, 177, 41, 77, 32, 152, 42, 71, 64, 27, 69, 167, 61, 44, 270, 378, 21, 300, 104, 141, 143, 26, 33, 86, 125, 116, 93, 22, 14, 62, 50, 20, 82, 45, 87, 40, 157, 18, 36, 12, 66, 25, 162, 5, 23, 124, 48, 6, 313, 70, 99, 112, 94, 11, 17, 58, 97]
# Names of the 100 fine-grained CIFAR-100 classes, ordered by integer label
# (class_names[i] is the human-readable name for label i).
class_names= ['apple', 'aquarium_fish', 'baby', 'bear', 'beaver', 'bed', 'bee', 'beetle', 'bicycle', 'bottle', 'bowl', 'boy', 'bridge', 'bus', 'butterfly',
'camel', 'can', 'castle', 'caterpillar', 'cattle', 'chair', 'chimpanzee', 'clock', 'cloud', 'cockroach', 'couch', 'crab', 'crocodile', 'cup',
'dinosaur', 'dolphin', 'elephant', 'flatfish', 'forest', 'fox', 'girl', 'hamster', 'house', 'kangaroo', 'computer_keyboard',
'lamp', 'lawn_mower', 'leopard', 'lion', 'lizard', 'lobster', 'man', 'maple_tree', 'motorcycle', 'mountain', 'mouse', 'mushroom',
'oak_tree', 'orange', 'orchid', 'otter', 'palm_tree', 'pear', 'pickup_truck', 'pine_tree', 'plain', 'plate', 'poppy', 'porcupine', 'possum',
'rabbit', 'raccoon', 'ray', 'road', 'rocket', 'rose',
'sea', 'seal', 'shark', 'shrew', 'skunk', 'skyscraper', 'snail', 'snake', 'spider', 'squirrel', 'streetcar', 'sunflower', 'sweet_pepper',
'table', 'tank', 'telephone', 'television', 'tiger', 'tractor', 'train', 'trout', 'tulip', 'turtle',
'wardrobe', 'whale', 'willow_tree', 'wolf', 'woman', 'worm']
# Display one example image per class in a 10x10 grid, labeled by name.
plt.figure(figsize=(20, 22))
n_rows, n_cols = 10, 10
for cell, sample_idx in enumerate(class_index):
    plt.subplot(n_rows, n_cols, cell + 1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    plt.imshow(X_train[sample_idx], cmap=plt.cm.binary)
    plt.xlabel(class_names[cell])
plt.show()
-Normalize the data
-Scale the values to a range of 0 to 1 before feeding them to the neural network model.
-Divide the values by 255.
-The training set and the testing set are preprocessed in the same way
# Preprocessing: rescale pixels to [0, 1] and one-hot encode the labels.
from tensorflow.keras.utils import to_categorical

# Cast to float32 and rescale pixel values from [0, 255] to [0, 1];
# train and test sets are transformed identically.
X_train = X_train.astype('float32') / 255
X_test = X_test.astype('float32') / 255
# One-hot encode the integer labels (100 classes -> width-100 vectors).
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
print(X_train.shape)
print(y_train.shape)
print(X_test.shape)
print(y_test.shape)
(50000, 32, 32, 3) (50000, 100) (10000, 32, 32, 3) (10000, 100)
# Data Augmentation
# Adding data augmentation for creating more images
# Divide train and validation set
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Configuration for creating new images: each generated image is randomly
# rotated by up to 20 degrees and/or flipped horizontally; 10% of the data
# is reserved for a validation subset.
train_datagen = ImageDataGenerator(
rotation_range=20,
horizontal_flip=True,
validation_split=0.1
)
# NOTE(review): fit() is only needed for featurewise statistics
# (featurewise_center / zca_whitening etc.); with only rotation/flip
# configured this call computes nothing — confirm it can be dropped.
train_datagen.fit(X_train)
Adding the augmented data to the original data
# Double the training set by appending one augmented copy of every image.
# flow() caps each batch at the dataset size (50000 < the requested
# 60000), so one batch yields a single augmented version of every
# training image.  Fetch exactly one batch with next() instead of the
# original for/break idiom.
X_batch, y_batch = next(train_datagen.flow(X_train, y_train, batch_size=60000))
X_train_aug = np.concatenate((X_train, X_batch))
y_train_aug = np.concatenate((y_train, y_batch))
print(X_train_aug.shape)
print(y_train_aug.shape)
(100000, 32, 32, 3) (100000, 100)
Plot accuracy on the training and validation datasets over training epochs.
Plot loss on the training and validation datasets over training epochs.
# Plot function for visualisation training process
import matplotlib.pyplot as plt
# function to plot the learning curves
def plot_results(history):
    """Plot loss (left) and accuracy (right) learning curves.

    `history` is a dict with 'loss', 'val_loss', 'acc' and 'val_acc'
    lists, e.g. the `.history` attribute of a Keras History object.
    """
    fig, (loss_ax, acc_ax) = plt.subplots(1, 2, figsize=(20, 7))
    loss_ax.plot(history['loss'], label="Train loss")
    loss_ax.plot(history['val_loss'], label="Validation loss")
    acc_ax.plot(history['acc'], label="Train accuracy")
    acc_ax.plot(history['val_acc'], label="Validation accuracy")
    for axis, title in ((loss_ax, 'Loss per epoch'),
                        (acc_ax, 'Accuracy per epoch')):
        axis.legend(fontsize=15)
        axis.set_title(title, fontsize=25)
        axis.set_xlabel("epoch", fontsize=20)
        axis.grid(True)
Early Stopping is a callback that allows you to specify the performance measure to monitor, the trigger, and once triggered, it will stop the training process.
from tensorflow.keras.callbacks import EarlyStopping
#Setting early_stopping callback
# Stop training once validation accuracy has not improved by at least
# min_delta for 15 consecutive epochs, then restore the weights from the
# best epoch.  'val_acc' matches the metric name 'acc' used in compile().
early_stopping = EarlyStopping(
monitor='val_acc',
patience=15,
min_delta=0.0000001,
restore_best_weights=True,
)
# Baseline model: one strided conv followed by a small MLP head.
baseModel = Sequential([
    # 3x3 conv, stride 2 halves the spatial dims: (32,32,3) -> (16,16,32)
    Conv2D(input_shape=(32, 32, 3), kernel_size=3, padding='same',
           strides=(2, 2), filters=32),
    # The original passed input_shape=(32,32,3) here too; it is ignored on
    # any layer that is not the first, so it was removed as misleading.
    Flatten(),
    Dense(64, activation='relu'),
    # 100-way softmax output: one probability per CIFAR-100 class
    Dense(100, activation='softmax')
])
baseModel.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 16, 16, 32) 896
flatten (Flatten) (None, 8192) 0
dense (Dense) (None, 64) 524352
dense_1 (Dense) (None, 100) 6500
=================================================================
Total params: 531,748
Trainable params: 531,748
Non-trainable params: 0
_________________________________________________________________
# Configure the model for training: categorical cross-entropy (one-hot
# labels), Adam at a 1e-4 learning rate, and accuracy tracked under the
# short name 'acc' (so EarlyStopping can monitor 'val_acc').
from tensorflow.keras import optimizers

adam_opt = optimizers.Adam(learning_rate=1e-4)
baseModel.compile(loss='categorical_crossentropy',
                  optimizer=adam_opt,
                  metrics=['acc'])
#Base Model without Data augmentation
#steps_per_epoch= params divided by batch_size
# Training model
# NOTE(review): steps_per_epoch=100 with batch_size=64 consumes only
# 6400 samples per epoch, not the full 50000-image training set
# (50000/64 ~ 782 steps) — confirm this subsampling is intentional.
# The test set is used as validation data, so val_* metrics are test-set
# metrics.  Wall-clock training time is measured around fit().
import time
training_start = time.time()
history = baseModel.fit(X_train, y_train, batch_size=64, steps_per_epoch=100, epochs=100,
validation_data=(X_test, y_test),
verbose=1,
callbacks=[early_stopping])
training_stop = time.time()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
Epoch 1/100 100/100 [==============================] - 6s 62ms/step - loss: 3.4017 - acc: 0.2077 - val_loss: 3.4654 - val_acc: 0.1942 Epoch 2/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3743 - acc: 0.2081 - val_loss: 3.4601 - val_acc: 0.1983 Epoch 3/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3620 - acc: 0.2081 - val_loss: 3.4608 - val_acc: 0.1994 Epoch 4/100 100/100 [==============================] - 4s 44ms/step - loss: 3.4062 - acc: 0.1992 - val_loss: 3.4588 - val_acc: 0.2040 Epoch 5/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3971 - acc: 0.2106 - val_loss: 3.4534 - val_acc: 0.1972 Epoch 6/100 100/100 [==============================] - 5s 46ms/step - loss: 3.3892 - acc: 0.2072 - val_loss: 3.4524 - val_acc: 0.1989 Epoch 7/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3829 - acc: 0.2037 - val_loss: 3.4445 - val_acc: 0.2029 Epoch 8/100 100/100 [==============================] - 5s 45ms/step - loss: 3.3853 - acc: 0.2040 - val_loss: 3.4473 - val_acc: 0.1987 Epoch 9/100 100/100 [==============================] - 4s 45ms/step - loss: 3.3570 - acc: 0.2148 - val_loss: 3.4426 - val_acc: 0.2051 Epoch 10/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3531 - acc: 0.2106 - val_loss: 3.4412 - val_acc: 0.2002 Epoch 11/100 100/100 [==============================] - 4s 43ms/step - loss: 3.3565 - acc: 0.2108 - val_loss: 3.4422 - val_acc: 0.1998 Epoch 12/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3617 - acc: 0.2072 - val_loss: 3.4410 - val_acc: 0.2022 Epoch 13/100 100/100 [==============================] - 5s 45ms/step - loss: 3.3364 - acc: 0.2094 - val_loss: 3.4361 - val_acc: 0.2029 Epoch 14/100 100/100 [==============================] - 5s 47ms/step - loss: 3.3582 - acc: 0.2061 - val_loss: 3.4311 - val_acc: 0.2031 Epoch 15/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3756 - acc: 0.2147 - val_loss: 3.4328 - 
val_acc: 0.2035 Epoch 16/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3085 - acc: 0.2184 - val_loss: 3.4299 - val_acc: 0.2013 Epoch 17/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3486 - acc: 0.2181 - val_loss: 3.4324 - val_acc: 0.2024 Epoch 18/100 100/100 [==============================] - 4s 43ms/step - loss: 3.3357 - acc: 0.2166 - val_loss: 3.4279 - val_acc: 0.2081 Epoch 19/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3616 - acc: 0.2113 - val_loss: 3.4251 - val_acc: 0.2015 Epoch 20/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3130 - acc: 0.2097 - val_loss: 3.4230 - val_acc: 0.2040 Epoch 21/100 100/100 [==============================] - 5s 46ms/step - loss: 3.3370 - acc: 0.2206 - val_loss: 3.4183 - val_acc: 0.2028 Epoch 22/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3003 - acc: 0.2209 - val_loss: 3.4189 - val_acc: 0.2064 Epoch 23/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2926 - acc: 0.2236 - val_loss: 3.4188 - val_acc: 0.2032 Epoch 24/100 100/100 [==============================] - 5s 46ms/step - loss: 3.3150 - acc: 0.2275 - val_loss: 3.4135 - val_acc: 0.2074 Epoch 25/100 100/100 [==============================] - 5s 45ms/step - loss: 3.2806 - acc: 0.2336 - val_loss: 3.4118 - val_acc: 0.2053 Epoch 26/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2899 - acc: 0.2141 - val_loss: 3.4134 - val_acc: 0.2014 Epoch 27/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3004 - acc: 0.2219 - val_loss: 3.4113 - val_acc: 0.2064 Epoch 28/100 100/100 [==============================] - 5s 45ms/step - loss: 3.3018 - acc: 0.2216 - val_loss: 3.4083 - val_acc: 0.2104 Epoch 29/100 100/100 [==============================] - 4s 45ms/step - loss: 3.3187 - acc: 0.2211 - val_loss: 3.4115 - val_acc: 0.2082 Epoch 30/100 100/100 [==============================] - 5s 46ms/step - loss: 3.3187 - acc: 
0.2138 - val_loss: 3.4044 - val_acc: 0.2075 Epoch 31/100 100/100 [==============================] - 5s 46ms/step - loss: 3.2725 - acc: 0.2236 - val_loss: 3.4037 - val_acc: 0.2088 Epoch 32/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2437 - acc: 0.2273 - val_loss: 3.4066 - val_acc: 0.2080 Epoch 33/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2622 - acc: 0.2230 - val_loss: 3.4009 - val_acc: 0.2080 Epoch 34/100 100/100 [==============================] - 5s 45ms/step - loss: 3.2883 - acc: 0.2250 - val_loss: 3.4053 - val_acc: 0.2071 Epoch 35/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2781 - acc: 0.2297 - val_loss: 3.4011 - val_acc: 0.2107 Epoch 36/100 100/100 [==============================] - 5s 45ms/step - loss: 3.2383 - acc: 0.2386 - val_loss: 3.3962 - val_acc: 0.2106 Epoch 37/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3015 - acc: 0.2212 - val_loss: 3.3958 - val_acc: 0.2075 Epoch 38/100 100/100 [==============================] - 4s 43ms/step - loss: 3.2849 - acc: 0.2255 - val_loss: 3.3934 - val_acc: 0.2094 Epoch 39/100 100/100 [==============================] - 4s 44ms/step - loss: 3.3019 - acc: 0.2292 - val_loss: 3.3938 - val_acc: 0.2115 Epoch 40/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2556 - acc: 0.2276 - val_loss: 3.3925 - val_acc: 0.2100 Epoch 41/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2396 - acc: 0.2256 - val_loss: 3.3935 - val_acc: 0.2078 Epoch 42/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2578 - acc: 0.2328 - val_loss: 3.3920 - val_acc: 0.2116 Epoch 43/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2838 - acc: 0.2280 - val_loss: 3.3922 - val_acc: 0.2111 Epoch 44/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2696 - acc: 0.2339 - val_loss: 3.3895 - val_acc: 0.2102 Epoch 45/100 100/100 [==============================] - 5s 
47ms/step - loss: 3.2367 - acc: 0.2334 - val_loss: 3.3887 - val_acc: 0.2106 Epoch 46/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2188 - acc: 0.2344 - val_loss: 3.3927 - val_acc: 0.2104 Epoch 47/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2452 - acc: 0.2333 - val_loss: 3.3850 - val_acc: 0.2114 Epoch 48/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2219 - acc: 0.2381 - val_loss: 3.3859 - val_acc: 0.2100 Epoch 49/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2166 - acc: 0.2328 - val_loss: 3.3839 - val_acc: 0.2121 Epoch 50/100 100/100 [==============================] - 5s 46ms/step - loss: 3.2321 - acc: 0.2331 - val_loss: 3.3903 - val_acc: 0.2053 Epoch 51/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2231 - acc: 0.2378 - val_loss: 3.3833 - val_acc: 0.2111 Epoch 52/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2355 - acc: 0.2380 - val_loss: 3.3848 - val_acc: 0.2109 Epoch 53/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2215 - acc: 0.2355 - val_loss: 3.3793 - val_acc: 0.2094 Epoch 54/100 100/100 [==============================] - 4s 43ms/step - loss: 3.2372 - acc: 0.2331 - val_loss: 3.3781 - val_acc: 0.2118 Epoch 55/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2563 - acc: 0.2253 - val_loss: 3.3786 - val_acc: 0.2110 Epoch 56/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2038 - acc: 0.2344 - val_loss: 3.3837 - val_acc: 0.2107 Epoch 57/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2070 - acc: 0.2425 - val_loss: 3.3812 - val_acc: 0.2112 Epoch 58/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2018 - acc: 0.2408 - val_loss: 3.3792 - val_acc: 0.2134 Epoch 59/100 100/100 [==============================] - 4s 44ms/step - loss: 3.2081 - acc: 0.2419 - val_loss: 3.3745 - val_acc: 0.2141 Epoch 60/100 100/100 
[==============================] - 4s 45ms/step - loss: 3.2170 - acc: 0.2336 - val_loss: 3.3739 - val_acc: 0.2142 Epoch 61/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2025 - acc: 0.2387 - val_loss: 3.3708 - val_acc: 0.2161 Epoch 62/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1954 - acc: 0.2419 - val_loss: 3.3672 - val_acc: 0.2118 Epoch 63/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2213 - acc: 0.2399 - val_loss: 3.3759 - val_acc: 0.2124 Epoch 64/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2025 - acc: 0.2397 - val_loss: 3.3758 - val_acc: 0.2106 Epoch 65/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1604 - acc: 0.2477 - val_loss: 3.3657 - val_acc: 0.2133 Epoch 66/100 100/100 [==============================] - 4s 45ms/step - loss: 3.2151 - acc: 0.2344 - val_loss: 3.3749 - val_acc: 0.2110 Epoch 67/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1734 - acc: 0.2458 - val_loss: 3.3714 - val_acc: 0.2192 Epoch 68/100 100/100 [==============================] - 4s 43ms/step - loss: 3.2053 - acc: 0.2475 - val_loss: 3.3672 - val_acc: 0.2156 Epoch 69/100 100/100 [==============================] - 4s 43ms/step - loss: 3.1762 - acc: 0.2411 - val_loss: 3.3646 - val_acc: 0.2161 Epoch 70/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1857 - acc: 0.2394 - val_loss: 3.3663 - val_acc: 0.2149 Epoch 71/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1754 - acc: 0.2432 - val_loss: 3.3656 - val_acc: 0.2139 Epoch 72/100 100/100 [==============================] - 4s 43ms/step - loss: 3.1563 - acc: 0.2509 - val_loss: 3.3689 - val_acc: 0.2130 Epoch 73/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1884 - acc: 0.2447 - val_loss: 3.3685 - val_acc: 0.2152 Epoch 74/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1830 - acc: 0.2461 - val_loss: 3.3626 - val_acc: 
0.2151 Epoch 75/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1562 - acc: 0.2523 - val_loss: 3.3687 - val_acc: 0.2136 Epoch 76/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1850 - acc: 0.2380 - val_loss: 3.3640 - val_acc: 0.2115 Epoch 77/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1570 - acc: 0.2483 - val_loss: 3.3590 - val_acc: 0.2152 Epoch 78/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1743 - acc: 0.2366 - val_loss: 3.3593 - val_acc: 0.2152 Epoch 79/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1842 - acc: 0.2421 - val_loss: 3.3627 - val_acc: 0.2182 Epoch 80/100 100/100 [==============================] - 4s 43ms/step - loss: 3.1448 - acc: 0.2466 - val_loss: 3.3594 - val_acc: 0.2205 Epoch 81/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1667 - acc: 0.2511 - val_loss: 3.3602 - val_acc: 0.2124 Epoch 82/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1548 - acc: 0.2498 - val_loss: 3.3606 - val_acc: 0.2155 Epoch 83/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1542 - acc: 0.2533 - val_loss: 3.3588 - val_acc: 0.2173 Epoch 84/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1139 - acc: 0.2525 - val_loss: 3.3641 - val_acc: 0.2157 Epoch 85/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1711 - acc: 0.2408 - val_loss: 3.3583 - val_acc: 0.2127 Epoch 86/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1668 - acc: 0.2495 - val_loss: 3.3528 - val_acc: 0.2169 Epoch 87/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1139 - acc: 0.2509 - val_loss: 3.3536 - val_acc: 0.2166 Epoch 88/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1470 - acc: 0.2489 - val_loss: 3.3552 - val_acc: 0.2155 Epoch 89/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1410 - acc: 0.2506 - 
val_loss: 3.3546 - val_acc: 0.2174 Epoch 90/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1357 - acc: 0.2553 - val_loss: 3.3530 - val_acc: 0.2159 Epoch 91/100 100/100 [==============================] - 4s 43ms/step - loss: 3.1426 - acc: 0.2511 - val_loss: 3.3506 - val_acc: 0.2175 Epoch 92/100 100/100 [==============================] - 4s 43ms/step - loss: 3.1325 - acc: 0.2547 - val_loss: 3.3520 - val_acc: 0.2168 Epoch 93/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1453 - acc: 0.2572 - val_loss: 3.3541 - val_acc: 0.2147 Epoch 94/100 100/100 [==============================] - 4s 44ms/step - loss: 3.1387 - acc: 0.2552 - val_loss: 3.3507 - val_acc: 0.2170 Epoch 95/100 100/100 [==============================] - 4s 45ms/step - loss: 3.1251 - acc: 0.2570 - val_loss: 3.3511 - val_acc: 0.2171 Training time: 431.7014887332916
Model severely overfits the train data, which causes the test accuracy to be very low
# plotting the learning curves
plot_results(history.history)
# Evaluate on the held-out test set.  The labels now say "Test" — the
# original printed "Val loss"/"Val accuracy" while evaluating X_test/y_test
# (the same data used as validation_data above, so the numbers coincide).
score = baseModel.evaluate(X_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
Val loss: 3.359449625015259 Val accuracy: 0.22050000727176666
# Training model
# NOTE(review): this fit() continues training the SAME baseModel instance,
# so its weights carry over from the previous run; rebuild and recompile
# the model first if an independent with/without-augmentation comparison
# is intended.
# NOTE(review): X_train_aug already contains one augmented copy of each
# image, and flow() applies random rotation/flip again on every batch —
# confirm this double augmentation is intentional.
import time
training_start = time.time()
history = baseModel.fit(train_datagen.flow(X_train_aug, y_train_aug, batch_size=64),
steps_per_epoch=100,
epochs=100,
validation_data=(X_test, y_test),
verbose=1,
callbacks=[early_stopping])
training_stop = time.time()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
Epoch 1/100 100/100 [==============================] - 24s 240ms/step - loss: 4.1953 - acc: 0.0684 - val_loss: 4.2044 - val_acc: 0.0673 Epoch 2/100 100/100 [==============================] - 24s 240ms/step - loss: 4.1634 - acc: 0.0708 - val_loss: 4.1529 - val_acc: 0.0771 Epoch 3/100 100/100 [==============================] - 24s 241ms/step - loss: 4.1459 - acc: 0.0759 - val_loss: 4.1161 - val_acc: 0.0821 Epoch 4/100 100/100 [==============================] - 24s 242ms/step - loss: 4.1023 - acc: 0.0816 - val_loss: 4.0879 - val_acc: 0.0822 Epoch 5/100 100/100 [==============================] - 24s 242ms/step - loss: 4.0485 - acc: 0.0886 - val_loss: 4.0485 - val_acc: 0.0934 Epoch 6/100 100/100 [==============================] - 24s 243ms/step - loss: 4.0334 - acc: 0.0967 - val_loss: 4.0389 - val_acc: 0.0942 Epoch 7/100 100/100 [==============================] - 24s 236ms/step - loss: 3.9899 - acc: 0.0991 - val_loss: 3.9958 - val_acc: 0.1006 Epoch 8/100 100/100 [==============================] - 24s 241ms/step - loss: 3.9963 - acc: 0.0995 - val_loss: 3.9835 - val_acc: 0.0968 Epoch 9/100 100/100 [==============================] - 24s 240ms/step - loss: 3.9537 - acc: 0.1020 - val_loss: 3.9588 - val_acc: 0.1019 Epoch 10/100 100/100 [==============================] - 24s 243ms/step - loss: 3.9868 - acc: 0.0978 - val_loss: 3.9426 - val_acc: 0.1019 Epoch 11/100 100/100 [==============================] - 24s 241ms/step - loss: 3.9290 - acc: 0.1078 - val_loss: 3.9231 - val_acc: 0.1102 Epoch 12/100 100/100 [==============================] - 24s 238ms/step - loss: 3.8930 - acc: 0.1152 - val_loss: 3.9228 - val_acc: 0.1104 Epoch 13/100 100/100 [==============================] - 24s 241ms/step - loss: 3.8979 - acc: 0.1163 - val_loss: 3.9001 - val_acc: 0.1147 Epoch 14/100 100/100 [==============================] - 24s 238ms/step - loss: 3.8604 - acc: 0.1203 - val_loss: 3.8724 - val_acc: 0.1170 Epoch 15/100 100/100 [==============================] - 24s 243ms/step - loss: 3.8827 - 
acc: 0.1195 - val_loss: 3.8728 - val_acc: 0.1184 Epoch 16/100 100/100 [==============================] - 24s 242ms/step - loss: 3.8536 - acc: 0.1192 - val_loss: 3.8526 - val_acc: 0.1171 Epoch 17/100 100/100 [==============================] - 24s 241ms/step - loss: 3.8278 - acc: 0.1275 - val_loss: 3.8330 - val_acc: 0.1252 Epoch 18/100 100/100 [==============================] - 25s 245ms/step - loss: 3.8382 - acc: 0.1216 - val_loss: 3.8242 - val_acc: 0.1226 Epoch 19/100 100/100 [==============================] - 24s 239ms/step - loss: 3.8221 - acc: 0.1270 - val_loss: 3.8137 - val_acc: 0.1264 Epoch 20/100 100/100 [==============================] - 24s 238ms/step - loss: 3.8080 - acc: 0.1280 - val_loss: 3.8034 - val_acc: 0.1268 Epoch 21/100 100/100 [==============================] - 24s 241ms/step - loss: 3.7909 - acc: 0.1312 - val_loss: 3.7945 - val_acc: 0.1324 Epoch 22/100 100/100 [==============================] - 24s 242ms/step - loss: 3.7756 - acc: 0.1314 - val_loss: 3.7876 - val_acc: 0.1329 Epoch 23/100 100/100 [==============================] - 24s 241ms/step - loss: 3.7787 - acc: 0.1353 - val_loss: 3.7801 - val_acc: 0.1339 Epoch 24/100 100/100 [==============================] - 24s 242ms/step - loss: 3.7613 - acc: 0.1378 - val_loss: 3.7670 - val_acc: 0.1375 Epoch 25/100 100/100 [==============================] - 24s 238ms/step - loss: 3.7292 - acc: 0.1422 - val_loss: 3.7560 - val_acc: 0.1416 Epoch 26/100 100/100 [==============================] - 24s 239ms/step - loss: 3.7594 - acc: 0.1398 - val_loss: 3.7500 - val_acc: 0.1463 Epoch 27/100 100/100 [==============================] - 24s 243ms/step - loss: 3.7337 - acc: 0.1420 - val_loss: 3.7385 - val_acc: 0.1445 Epoch 28/100 100/100 [==============================] - 24s 239ms/step - loss: 3.7417 - acc: 0.1375 - val_loss: 3.7333 - val_acc: 0.1438 Epoch 29/100 100/100 [==============================] - 24s 237ms/step - loss: 3.7089 - acc: 0.1505 - val_loss: 3.7298 - val_acc: 0.1447 Epoch 30/100 100/100 
[==============================] - 24s 241ms/step - loss: 3.7250 - acc: 0.1364 - val_loss: 3.7172 - val_acc: 0.1492 Epoch 31/100 100/100 [==============================] - 24s 241ms/step - loss: 3.7188 - acc: 0.1445 - val_loss: 3.7034 - val_acc: 0.1510 Epoch 32/100 100/100 [==============================] - 24s 239ms/step - loss: 3.7252 - acc: 0.1506 - val_loss: 3.7082 - val_acc: 0.1495 Epoch 33/100 100/100 [==============================] - 24s 240ms/step - loss: 3.6845 - acc: 0.1548 - val_loss: 3.6887 - val_acc: 0.1553 Epoch 34/100 100/100 [==============================] - 24s 244ms/step - loss: 3.6926 - acc: 0.1509 - val_loss: 3.6881 - val_acc: 0.1533 Epoch 35/100 100/100 [==============================] - 24s 241ms/step - loss: 3.6744 - acc: 0.1534 - val_loss: 3.6850 - val_acc: 0.1536 Epoch 36/100 100/100 [==============================] - 24s 243ms/step - loss: 3.6742 - acc: 0.1550 - val_loss: 3.6786 - val_acc: 0.1521 Epoch 37/100 100/100 [==============================] - 24s 243ms/step - loss: 3.6564 - acc: 0.1577 - val_loss: 3.6689 - val_acc: 0.1585 Epoch 38/100 100/100 [==============================] - 24s 242ms/step - loss: 3.6569 - acc: 0.1564 - val_loss: 3.6664 - val_acc: 0.1582 Epoch 39/100 100/100 [==============================] - 24s 238ms/step - loss: 3.6293 - acc: 0.1645 - val_loss: 3.6620 - val_acc: 0.1584 Epoch 40/100 100/100 [==============================] - 24s 243ms/step - loss: 3.6553 - acc: 0.1586 - val_loss: 3.6519 - val_acc: 0.1567 Epoch 41/100 100/100 [==============================] - 24s 241ms/step - loss: 3.6636 - acc: 0.1592 - val_loss: 3.6450 - val_acc: 0.1596 Epoch 42/100 100/100 [==============================] - 24s 242ms/step - loss: 3.5913 - acc: 0.1750 - val_loss: 3.6460 - val_acc: 0.1617 Epoch 43/100 100/100 [==============================] - 24s 239ms/step - loss: 3.6056 - acc: 0.1652 - val_loss: 3.6339 - val_acc: 0.1642 Epoch 44/100 100/100 [==============================] - 24s 240ms/step - loss: 3.6000 - acc: 0.1684 - 
val_loss: 3.6316 - val_acc: 0.1659 Epoch 45/100 100/100 [==============================] - 24s 241ms/step - loss: 3.6044 - acc: 0.1700 - val_loss: 3.6325 - val_acc: 0.1663 Epoch 46/100 100/100 [==============================] - 24s 240ms/step - loss: 3.5785 - acc: 0.1717 - val_loss: 3.6214 - val_acc: 0.1647 Epoch 47/100 100/100 [==============================] - 24s 241ms/step - loss: 3.6018 - acc: 0.1650 - val_loss: 3.6207 - val_acc: 0.1683 Epoch 48/100 100/100 [==============================] - 24s 242ms/step - loss: 3.6186 - acc: 0.1622 - val_loss: 3.6132 - val_acc: 0.1729 Epoch 49/100 100/100 [==============================] - 24s 239ms/step - loss: 3.6032 - acc: 0.1603 - val_loss: 3.6151 - val_acc: 0.1670 Epoch 50/100 100/100 [==============================] - 24s 241ms/step - loss: 3.5673 - acc: 0.1670 - val_loss: 3.6064 - val_acc: 0.1713 Epoch 51/100 100/100 [==============================] - 24s 240ms/step - loss: 3.5945 - acc: 0.1686 - val_loss: 3.6040 - val_acc: 0.1742 Epoch 52/100 100/100 [==============================] - 24s 241ms/step - loss: 3.5788 - acc: 0.1758 - val_loss: 3.6008 - val_acc: 0.1729 Epoch 53/100 100/100 [==============================] - 24s 241ms/step - loss: 3.5641 - acc: 0.1753 - val_loss: 3.5934 - val_acc: 0.1750 Epoch 54/100 100/100 [==============================] - 24s 244ms/step - loss: 3.5814 - acc: 0.1748 - val_loss: 3.5936 - val_acc: 0.1720 Epoch 55/100 100/100 [==============================] - 24s 238ms/step - loss: 3.5987 - acc: 0.1642 - val_loss: 3.5853 - val_acc: 0.1749 Epoch 56/100 100/100 [==============================] - 24s 240ms/step - loss: 3.5712 - acc: 0.1652 - val_loss: 3.5839 - val_acc: 0.1718 Epoch 57/100 100/100 [==============================] - 24s 243ms/step - loss: 3.5499 - acc: 0.1747 - val_loss: 3.5843 - val_acc: 0.1735 Epoch 58/100 100/100 [==============================] - 25s 246ms/step - loss: 3.5636 - acc: 0.1755 - val_loss: 3.5721 - val_acc: 0.1762 Epoch 59/100 100/100 
[==============================] - 24s 241ms/step - loss: 3.5213 - acc: 0.1820 - val_loss: 3.5860 - val_acc: 0.1730 Epoch 60/100 100/100 [==============================] - 24s 242ms/step - loss: 3.5466 - acc: 0.1739 - val_loss: 3.5731 - val_acc: 0.1751 Epoch 61/100 100/100 [==============================] - 25s 245ms/step - loss: 3.5551 - acc: 0.1741 - val_loss: 3.5751 - val_acc: 0.1724 Epoch 62/100 100/100 [==============================] - 24s 244ms/step - loss: 3.5417 - acc: 0.1764 - val_loss: 3.5687 - val_acc: 0.1729 Epoch 63/100 100/100 [==============================] - 24s 243ms/step - loss: 3.5648 - acc: 0.1797 - val_loss: 3.5685 - val_acc: 0.1764 Epoch 64/100 100/100 [==============================] - 24s 242ms/step - loss: 3.5251 - acc: 0.1769 - val_loss: 3.5629 - val_acc: 0.1758 Epoch 65/100 100/100 [==============================] - 24s 240ms/step - loss: 3.5423 - acc: 0.1691 - val_loss: 3.5533 - val_acc: 0.1797 Epoch 66/100 100/100 [==============================] - 24s 239ms/step - loss: 3.5264 - acc: 0.1853 - val_loss: 3.5544 - val_acc: 0.1790 Epoch 67/100 100/100 [==============================] - 24s 238ms/step - loss: 3.5271 - acc: 0.1739 - val_loss: 3.5422 - val_acc: 0.1830 Epoch 68/100 100/100 [==============================] - 24s 241ms/step - loss: 3.5196 - acc: 0.1872 - val_loss: 3.5458 - val_acc: 0.1786 Epoch 69/100 100/100 [==============================] - 24s 239ms/step - loss: 3.5212 - acc: 0.1861 - val_loss: 3.5485 - val_acc: 0.1823 Epoch 70/100 100/100 [==============================] - 24s 238ms/step - loss: 3.5059 - acc: 0.1817 - val_loss: 3.5369 - val_acc: 0.1835 Epoch 71/100 100/100 [==============================] - 24s 239ms/step - loss: 3.5163 - acc: 0.1766 - val_loss: 3.5357 - val_acc: 0.1836 Epoch 72/100 100/100 [==============================] - 24s 240ms/step - loss: 3.4960 - acc: 0.1833 - val_loss: 3.5424 - val_acc: 0.1826 Epoch 73/100 100/100 [==============================] - 24s 242ms/step - loss: 3.5070 - acc: 0.1881 - 
val_loss: 3.5317 - val_acc: 0.1838 Epoch 74/100 100/100 [==============================] - 24s 241ms/step - loss: 3.4947 - acc: 0.1833 - val_loss: 3.5264 - val_acc: 0.1844 Epoch 75/100 100/100 [==============================] - 24s 239ms/step - loss: 3.5225 - acc: 0.1877 - val_loss: 3.5302 - val_acc: 0.1853 Epoch 76/100 100/100 [==============================] - 24s 242ms/step - loss: 3.5003 - acc: 0.1863 - val_loss: 3.5218 - val_acc: 0.1848 Epoch 77/100 100/100 [==============================] - 24s 241ms/step - loss: 3.5105 - acc: 0.1838 - val_loss: 3.5218 - val_acc: 0.1865 Epoch 78/100 100/100 [==============================] - 24s 238ms/step - loss: 3.4999 - acc: 0.1834 - val_loss: 3.5175 - val_acc: 0.1870 Epoch 79/100 100/100 [==============================] - 24s 239ms/step - loss: 3.4926 - acc: 0.1898 - val_loss: 3.5140 - val_acc: 0.1866 Epoch 80/100 100/100 [==============================] - 24s 238ms/step - loss: 3.4894 - acc: 0.1850 - val_loss: 3.5114 - val_acc: 0.1851 Epoch 81/100 100/100 [==============================] - 24s 239ms/step - loss: 3.4784 - acc: 0.1858 - val_loss: 3.5137 - val_acc: 0.1910 Epoch 82/100 100/100 [==============================] - 24s 242ms/step - loss: 3.5029 - acc: 0.1798 - val_loss: 3.5075 - val_acc: 0.1893 Epoch 83/100 100/100 [==============================] - 24s 241ms/step - loss: 3.5047 - acc: 0.1770 - val_loss: 3.5080 - val_acc: 0.1862 Epoch 84/100 100/100 [==============================] - 24s 244ms/step - loss: 3.5079 - acc: 0.1889 - val_loss: 3.5074 - val_acc: 0.1912 Epoch 85/100 100/100 [==============================] - 24s 238ms/step - loss: 3.4592 - acc: 0.1933 - val_loss: 3.4967 - val_acc: 0.1907 Epoch 86/100 100/100 [==============================] - 24s 242ms/step - loss: 3.4595 - acc: 0.1866 - val_loss: 3.5044 - val_acc: 0.1862 Epoch 87/100 100/100 [==============================] - 24s 239ms/step - loss: 3.4625 - acc: 0.1825 - val_loss: 3.4943 - val_acc: 0.1883 Epoch 88/100 100/100 
[==============================] - 24s 239ms/step - loss: 3.4604 - acc: 0.1903 - val_loss: 3.4954 - val_acc: 0.1923 Epoch 89/100 100/100 [==============================] - 24s 242ms/step - loss: 3.4673 - acc: 0.1952 - val_loss: 3.4927 - val_acc: 0.1886 Epoch 90/100 100/100 [==============================] - 24s 244ms/step - loss: 3.4484 - acc: 0.1884 - val_loss: 3.4839 - val_acc: 0.1944 Epoch 91/100 100/100 [==============================] - 24s 241ms/step - loss: 3.4866 - acc: 0.1858 - val_loss: 3.4838 - val_acc: 0.1914 Epoch 92/100 100/100 [==============================] - 24s 243ms/step - loss: 3.4451 - acc: 0.1877 - val_loss: 3.4870 - val_acc: 0.1893 Epoch 93/100 100/100 [==============================] - 24s 240ms/step - loss: 3.4461 - acc: 0.1848 - val_loss: 3.4818 - val_acc: 0.1929 Epoch 94/100 100/100 [==============================] - 24s 242ms/step - loss: 3.4448 - acc: 0.1923 - val_loss: 3.4786 - val_acc: 0.1940 Epoch 95/100 100/100 [==============================] - 24s 241ms/step - loss: 3.4311 - acc: 0.1975 - val_loss: 3.4810 - val_acc: 0.1913 Epoch 96/100 100/100 [==============================] - 24s 239ms/step - loss: 3.4809 - acc: 0.1818 - val_loss: 3.4745 - val_acc: 0.1945 Epoch 97/100 100/100 [==============================] - 24s 239ms/step - loss: 3.4323 - acc: 0.1955 - val_loss: 3.4715 - val_acc: 0.1967 Epoch 98/100 100/100 [==============================] - 24s 242ms/step - loss: 3.4257 - acc: 0.2053 - val_loss: 3.4695 - val_acc: 0.1972 Epoch 99/100 100/100 [==============================] - 24s 239ms/step - loss: 3.4162 - acc: 0.1964 - val_loss: 3.4792 - val_acc: 0.1929 Epoch 100/100 100/100 [==============================] - 24s 241ms/step - loss: 3.4362 - acc: 0.1947 - val_loss: 3.4650 - val_acc: 0.1935 Training time: 2431.4740347862244
# Learning curves (loss / accuracy) for this training run
plot_results(history.history)

# Final evaluation of the base model on the test set, verbose output suppressed
test_metrics = baseModel.evaluate(X_test, y_test, verbose=0)
print('Val loss:', test_metrics[0])
print('Val accuracy:', test_metrics[1])
Val loss: 3.464994192123413 Val accuracy: 0.19349999725818634
# Improved model 1: a small CNN for 32x32x3 CIFAR-100 inputs.
# NOTE(review): the original Conv2D layers had no activation, so the two
# convolutions composed into a single linear map; ReLU activations are added
# so each conv layer contributes a non-linearity. Activations add no weights,
# so the parameter counts in the printed summary are unchanged.
model_improved1 = Sequential()
model_improved1.add(Conv2D(input_shape=(32, 32, 3), kernel_size=(2, 2), padding='same', strides=(2, 2), filters=32, activation='relu'))
model_improved1.add(MaxPooling2D(pool_size=(2, 2), strides=(1, 1), padding='same'))
model_improved1.add(Conv2D(kernel_size=(2, 2), padding='same', strides=(2, 2), filters=64, activation='relu'))
model_improved1.add(MaxPooling2D(pool_size=(2, 2), strides=(1, 1), padding='same'))
model_improved1.add(Flatten())
model_improved1.add(Dense(256, activation='relu'))
model_improved1.add(Dense(128, activation='relu'))
# 100-way softmax head, one unit per CIFAR-100 class
model_improved1.add(Dense(100, activation='softmax'))
model_improved1.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_8 (Conv2D) (None, 16, 16, 32) 416
max_pooling2d_4 (MaxPooling (None, 16, 16, 32) 0
2D)
conv2d_9 (Conv2D) (None, 8, 8, 64) 8256
max_pooling2d_5 (MaxPooling (None, 8, 8, 64) 0
2D)
flatten_1 (Flatten) (None, 4096) 0
dense_2 (Dense) (None, 256) 1048832
dense_3 (Dense) (None, 128) 32896
dense_4 (Dense) (None, 100) 12900
=================================================================
Total params: 1,103,300
Trainable params: 1,103,300
Non-trainable params: 0
_________________________________________________________________
# Configure the model for training.
# Categorical cross-entropy matches the one-hot encoded labels; accuracy is
# tracked under the short name 'acc' (the key used by the plotting helper).
from tensorflow.keras import optimizers

rmsprop_opt = optimizers.RMSprop(learning_rate=1e-4)
model_improved1.compile(loss='categorical_crossentropy',
                        optimizer=rmsprop_opt,
                        metrics=['acc'])
# Training with default dataset
# Fit model_improved1 directly on the in-memory arrays (no augmentation).
import time

# NOTE(review): steps_per_epoch=300 with batch_size=64 means each "epoch"
# consumes only 19,200 samples of the training set — confirm this is
# intentional rather than a leftover from the generator-based runs.
# time.perf_counter() is monotonic, unlike time.time(), so the elapsed
# measurement is immune to system clock adjustments.
training_start = time.perf_counter()
history = model_improved1.fit(X_train, y_train, batch_size=64, steps_per_epoch=300, epochs=200,
                              validation_data=(X_test, y_test),
                              verbose=1,
                              callbacks=[early_stopping])
training_stop = time.perf_counter()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
2022-11-25 13:31:15.652286: W tensorflow/tsl/framework/cpu_allocator_impl.cc:82] Allocation of 614400000 exceeds 10% of free system memory.
Epoch 1/200 300/300 [==============================] - 2s 4ms/step - loss: 4.4675 - acc: 0.0370 - val_loss: 4.2992 - val_acc: 0.0517 Epoch 2/200 300/300 [==============================] - 1s 3ms/step - loss: 4.1581 - acc: 0.0759 - val_loss: 4.0237 - val_acc: 0.0956 Epoch 3/200 300/300 [==============================] - 1s 3ms/step - loss: 3.9163 - acc: 0.1108 - val_loss: 3.8664 - val_acc: 0.1181 Epoch 4/200 300/300 [==============================] - 1s 3ms/step - loss: 3.7737 - acc: 0.1362 - val_loss: 3.7204 - val_acc: 0.1420 Epoch 5/200 300/300 [==============================] - 1s 3ms/step - loss: 3.6627 - acc: 0.1551 - val_loss: 3.6192 - val_acc: 0.1617 Epoch 6/200 300/300 [==============================] - 1s 3ms/step - loss: 3.5564 - acc: 0.1752 - val_loss: 3.5354 - val_acc: 0.1758 Epoch 7/200 300/300 [==============================] - 1s 3ms/step - loss: 3.4945 - acc: 0.1821 - val_loss: 3.4765 - val_acc: 0.1863 Epoch 8/200 300/300 [==============================] - 1s 3ms/step - loss: 3.4115 - acc: 0.1951 - val_loss: 3.4261 - val_acc: 0.1961 Epoch 9/200 300/300 [==============================] - 1s 3ms/step - loss: 3.3735 - acc: 0.2053 - val_loss: 3.3912 - val_acc: 0.2035 Epoch 10/200 300/300 [==============================] - 1s 3ms/step - loss: 3.3065 - acc: 0.2193 - val_loss: 3.3319 - val_acc: 0.2118 Epoch 11/200 300/300 [==============================] - 1s 3ms/step - loss: 3.2509 - acc: 0.2298 - val_loss: 3.2777 - val_acc: 0.2221 Epoch 12/200 300/300 [==============================] - 1s 3ms/step - loss: 3.2213 - acc: 0.2326 - val_loss: 3.2431 - val_acc: 0.2302 Epoch 13/200 300/300 [==============================] - 1s 3ms/step - loss: 3.1835 - acc: 0.2376 - val_loss: 3.2178 - val_acc: 0.2332 Epoch 14/200 300/300 [==============================] - 1s 3ms/step - loss: 3.1216 - acc: 0.2497 - val_loss: 3.1683 - val_acc: 0.2407 Epoch 15/200 300/300 [==============================] - 1s 3ms/step - loss: 3.0924 - acc: 0.2532 - val_loss: 3.1601 - val_acc: 
0.2469 Epoch 16/200 300/300 [==============================] - 1s 3ms/step - loss: 3.0699 - acc: 0.2610 - val_loss: 3.1275 - val_acc: 0.2520 Epoch 17/200 300/300 [==============================] - 1s 3ms/step - loss: 3.0170 - acc: 0.2698 - val_loss: 3.0999 - val_acc: 0.2558 Epoch 18/200 300/300 [==============================] - 1s 3ms/step - loss: 2.9886 - acc: 0.2738 - val_loss: 3.0677 - val_acc: 0.2655 Epoch 19/200 300/300 [==============================] - 1s 3ms/step - loss: 2.9501 - acc: 0.2836 - val_loss: 3.0617 - val_acc: 0.2638 Epoch 20/200 300/300 [==============================] - 1s 3ms/step - loss: 2.9254 - acc: 0.2905 - val_loss: 3.0148 - val_acc: 0.2740 Epoch 21/200 300/300 [==============================] - 1s 3ms/step - loss: 2.9162 - acc: 0.2895 - val_loss: 2.9978 - val_acc: 0.2746 Epoch 22/200 300/300 [==============================] - 1s 3ms/step - loss: 2.8459 - acc: 0.3046 - val_loss: 2.9833 - val_acc: 0.2792 Epoch 23/200 300/300 [==============================] - 1s 3ms/step - loss: 2.8603 - acc: 0.3004 - val_loss: 2.9614 - val_acc: 0.2814 Epoch 24/200 300/300 [==============================] - 1s 3ms/step - loss: 2.8082 - acc: 0.3117 - val_loss: 2.9495 - val_acc: 0.2843 Epoch 25/200 300/300 [==============================] - 1s 3ms/step - loss: 2.7816 - acc: 0.3162 - val_loss: 2.9277 - val_acc: 0.2910 Epoch 26/200 300/300 [==============================] - 1s 3ms/step - loss: 2.7788 - acc: 0.3192 - val_loss: 2.9089 - val_acc: 0.2942 Epoch 27/200 300/300 [==============================] - 1s 3ms/step - loss: 2.7160 - acc: 0.3348 - val_loss: 2.8866 - val_acc: 0.3009 Epoch 28/200 300/300 [==============================] - 1s 3ms/step - loss: 2.7251 - acc: 0.3234 - val_loss: 2.8752 - val_acc: 0.3030 Epoch 29/200 300/300 [==============================] - 1s 3ms/step - loss: 2.6949 - acc: 0.3300 - val_loss: 2.8486 - val_acc: 0.3096 Epoch 30/200 300/300 [==============================] - 1s 3ms/step - loss: 2.6513 - acc: 0.3406 - val_loss: 2.8690 
- val_acc: 0.3016 Epoch 31/200 300/300 [==============================] - 1s 3ms/step - loss: 2.6588 - acc: 0.3416 - val_loss: 2.8375 - val_acc: 0.3105 Epoch 32/200 300/300 [==============================] - 1s 3ms/step - loss: 2.6186 - acc: 0.3499 - val_loss: 2.8248 - val_acc: 0.3106 Epoch 33/200 300/300 [==============================] - 1s 3ms/step - loss: 2.6010 - acc: 0.3497 - val_loss: 2.8061 - val_acc: 0.3162 Epoch 34/200 300/300 [==============================] - 1s 3ms/step - loss: 2.5898 - acc: 0.3582 - val_loss: 2.8232 - val_acc: 0.3111 Epoch 35/200 300/300 [==============================] - 1s 3ms/step - loss: 2.5567 - acc: 0.3634 - val_loss: 2.7850 - val_acc: 0.3192 Epoch 36/200 300/300 [==============================] - 1s 3ms/step - loss: 2.5373 - acc: 0.3664 - val_loss: 2.7693 - val_acc: 0.3230 Epoch 37/200 300/300 [==============================] - 1s 3ms/step - loss: 2.5250 - acc: 0.3658 - val_loss: 2.7661 - val_acc: 0.3222 Epoch 38/200 300/300 [==============================] - 1s 3ms/step - loss: 2.4893 - acc: 0.3738 - val_loss: 2.7569 - val_acc: 0.3228 Epoch 39/200 300/300 [==============================] - 1s 3ms/step - loss: 2.4964 - acc: 0.3746 - val_loss: 2.7468 - val_acc: 0.3242 Epoch 40/200 300/300 [==============================] - 1s 3ms/step - loss: 2.4507 - acc: 0.3847 - val_loss: 2.7360 - val_acc: 0.3339 Epoch 41/200 300/300 [==============================] - 1s 3ms/step - loss: 2.4527 - acc: 0.3809 - val_loss: 2.7485 - val_acc: 0.3269 Epoch 42/200 300/300 [==============================] - 1s 3ms/step - loss: 2.4273 - acc: 0.3898 - val_loss: 2.7156 - val_acc: 0.3309 Epoch 43/200 300/300 [==============================] - 1s 3ms/step - loss: 2.3965 - acc: 0.3948 - val_loss: 2.7046 - val_acc: 0.3360 Epoch 44/200 300/300 [==============================] - 1s 3ms/step - loss: 2.4034 - acc: 0.3936 - val_loss: 2.6899 - val_acc: 0.3359 Epoch 45/200 300/300 [==============================] - 1s 3ms/step - loss: 2.3543 - acc: 0.4072 - 
val_loss: 2.6921 - val_acc: 0.3439 Epoch 46/200 300/300 [==============================] - 1s 3ms/step - loss: 2.3574 - acc: 0.4055 - val_loss: 2.6681 - val_acc: 0.3462 Epoch 47/200 300/300 [==============================] - 1s 3ms/step - loss: 2.3386 - acc: 0.4037 - val_loss: 2.6872 - val_acc: 0.3396 Epoch 48/200 300/300 [==============================] - 1s 3ms/step - loss: 2.3194 - acc: 0.4120 - val_loss: 2.6896 - val_acc: 0.3417 Epoch 49/200 300/300 [==============================] - 1s 3ms/step - loss: 2.3059 - acc: 0.4128 - val_loss: 2.6696 - val_acc: 0.3425 Epoch 50/200 300/300 [==============================] - 1s 3ms/step - loss: 2.2805 - acc: 0.4225 - val_loss: 2.6726 - val_acc: 0.3421 Epoch 51/200 300/300 [==============================] - 1s 3ms/step - loss: 2.2715 - acc: 0.4243 - val_loss: 2.6513 - val_acc: 0.3486 Epoch 52/200 300/300 [==============================] - 1s 3ms/step - loss: 2.2640 - acc: 0.4223 - val_loss: 2.6467 - val_acc: 0.3521 Epoch 53/200 300/300 [==============================] - 1s 3ms/step - loss: 2.2040 - acc: 0.4344 - val_loss: 2.6710 - val_acc: 0.3435 Epoch 54/200 300/300 [==============================] - 1s 3ms/step - loss: 2.2295 - acc: 0.4320 - val_loss: 2.6421 - val_acc: 0.3544 Epoch 55/200 300/300 [==============================] - 1s 3ms/step - loss: 2.2300 - acc: 0.4314 - val_loss: 2.6346 - val_acc: 0.3544 Epoch 56/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1870 - acc: 0.4420 - val_loss: 2.6330 - val_acc: 0.3579 Epoch 57/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1767 - acc: 0.4422 - val_loss: 2.6191 - val_acc: 0.3597 Epoch 58/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1509 - acc: 0.4505 - val_loss: 2.6320 - val_acc: 0.3558 Epoch 59/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1473 - acc: 0.4524 - val_loss: 2.6253 - val_acc: 0.3575 Epoch 60/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1373 - 
acc: 0.4479 - val_loss: 2.6099 - val_acc: 0.3597 Epoch 61/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1038 - acc: 0.4561 - val_loss: 2.6297 - val_acc: 0.3558 Epoch 62/200 300/300 [==============================] - 1s 3ms/step - loss: 2.1186 - acc: 0.4576 - val_loss: 2.6674 - val_acc: 0.3499 Epoch 63/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0787 - acc: 0.4606 - val_loss: 2.6142 - val_acc: 0.3583 Epoch 64/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0501 - acc: 0.4732 - val_loss: 2.5930 - val_acc: 0.3668 Epoch 65/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0849 - acc: 0.4637 - val_loss: 2.5911 - val_acc: 0.3596 Epoch 66/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0333 - acc: 0.4738 - val_loss: 2.5932 - val_acc: 0.3658 Epoch 67/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0231 - acc: 0.4799 - val_loss: 2.6050 - val_acc: 0.3689 Epoch 68/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0314 - acc: 0.4744 - val_loss: 2.5920 - val_acc: 0.3644 Epoch 69/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9809 - acc: 0.4826 - val_loss: 2.5977 - val_acc: 0.3682 Epoch 70/200 300/300 [==============================] - 1s 3ms/step - loss: 2.0056 - acc: 0.4827 - val_loss: 2.5955 - val_acc: 0.3700 Epoch 71/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9582 - acc: 0.4938 - val_loss: 2.5873 - val_acc: 0.3694 Epoch 72/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9581 - acc: 0.4930 - val_loss: 2.6140 - val_acc: 0.3627 Epoch 73/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9779 - acc: 0.4832 - val_loss: 2.5837 - val_acc: 0.3657 Epoch 74/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9238 - acc: 0.4981 - val_loss: 2.5978 - val_acc: 0.3692 Epoch 75/200 300/300 [==============================] - 1s 3ms/step - 
loss: 1.9148 - acc: 0.5040 - val_loss: 2.5806 - val_acc: 0.3723 Epoch 76/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9123 - acc: 0.5105 - val_loss: 2.5802 - val_acc: 0.3722 Epoch 77/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8828 - acc: 0.5083 - val_loss: 2.5913 - val_acc: 0.3699 Epoch 78/200 300/300 [==============================] - 1s 3ms/step - loss: 1.9082 - acc: 0.4995 - val_loss: 2.5945 - val_acc: 0.3684 Epoch 79/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8576 - acc: 0.5139 - val_loss: 2.5940 - val_acc: 0.3678 Epoch 80/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8625 - acc: 0.5164 - val_loss: 2.5932 - val_acc: 0.3737 Epoch 81/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8427 - acc: 0.5158 - val_loss: 2.5869 - val_acc: 0.3706 Epoch 82/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8038 - acc: 0.5320 - val_loss: 2.5772 - val_acc: 0.3783 Epoch 83/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8515 - acc: 0.5196 - val_loss: 2.5793 - val_acc: 0.3739 Epoch 84/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7746 - acc: 0.5340 - val_loss: 2.5991 - val_acc: 0.3745 Epoch 85/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7893 - acc: 0.5372 - val_loss: 2.5973 - val_acc: 0.3710 Epoch 86/200 300/300 [==============================] - 1s 3ms/step - loss: 1.8075 - acc: 0.5263 - val_loss: 2.5950 - val_acc: 0.3684 Epoch 87/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7449 - acc: 0.5467 - val_loss: 2.5935 - val_acc: 0.3762 Epoch 88/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7680 - acc: 0.5380 - val_loss: 2.6037 - val_acc: 0.3775 Epoch 89/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7305 - acc: 0.5464 - val_loss: 2.5987 - val_acc: 0.3773 Epoch 90/200 300/300 [==============================] - 1s 
3ms/step - loss: 1.7267 - acc: 0.5436 - val_loss: 2.6147 - val_acc: 0.3705 Epoch 91/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7325 - acc: 0.5478 - val_loss: 2.6099 - val_acc: 0.3724 Epoch 92/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6880 - acc: 0.5546 - val_loss: 2.6318 - val_acc: 0.3680 Epoch 93/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6854 - acc: 0.5566 - val_loss: 2.6020 - val_acc: 0.3728 Epoch 94/200 300/300 [==============================] - 1s 3ms/step - loss: 1.7061 - acc: 0.5522 - val_loss: 2.6108 - val_acc: 0.3742 Epoch 95/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6409 - acc: 0.5662 - val_loss: 2.6170 - val_acc: 0.3795 Epoch 96/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6723 - acc: 0.5629 - val_loss: 2.6299 - val_acc: 0.3777 Epoch 97/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6392 - acc: 0.5677 - val_loss: 2.6055 - val_acc: 0.3783 Epoch 98/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6268 - acc: 0.5755 - val_loss: 2.6312 - val_acc: 0.3762 Epoch 99/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6428 - acc: 0.5652 - val_loss: 2.6267 - val_acc: 0.3770 Epoch 100/200 300/300 [==============================] - 1s 3ms/step - loss: 1.5802 - acc: 0.5832 - val_loss: 2.6383 - val_acc: 0.3775 Epoch 101/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6049 - acc: 0.5826 - val_loss: 2.6364 - val_acc: 0.3781 Epoch 102/200 300/300 [==============================] - 1s 3ms/step - loss: 1.5764 - acc: 0.5844 - val_loss: 2.6453 - val_acc: 0.3751 Epoch 103/200 300/300 [==============================] - 1s 3ms/step - loss: 1.5490 - acc: 0.5888 - val_loss: 2.6487 - val_acc: 0.3788 Epoch 104/200 300/300 [==============================] - 1s 3ms/step - loss: 1.6038 - acc: 0.5764 - val_loss: 2.6352 - val_acc: 0.3813 Epoch 105/200 300/300 
[==============================] - 1s 3ms/step - loss: 1.5280 - acc: 0.5946 - val_loss: 2.6736 - val_acc: 0.3707 Epoch 106/200 300/300 [==============================] - 1s 3ms/step - loss: 1.5241 - acc: 0.5965 - val_loss: 2.6764 - val_acc: 0.3764 Epoch 107/200 300/300 [==============================] - 1s 3ms/step - loss: 1.5636 - acc: 0.5865 - val_loss: 2.6890 - val_acc: 0.3763 Epoch 108/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4804 - acc: 0.6082 - val_loss: 2.6871 - val_acc: 0.3690 Epoch 109/200 300/300 [==============================] - 1s 3ms/step - loss: 1.5225 - acc: 0.5960 - val_loss: 2.6573 - val_acc: 0.3773 Epoch 110/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4747 - acc: 0.6092 - val_loss: 2.6817 - val_acc: 0.3820 Epoch 111/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4892 - acc: 0.6061 - val_loss: 2.6606 - val_acc: 0.3838 Epoch 112/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4854 - acc: 0.6052 - val_loss: 2.6836 - val_acc: 0.3755 Epoch 113/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4203 - acc: 0.6234 - val_loss: 2.6940 - val_acc: 0.3743 Epoch 114/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4524 - acc: 0.6143 - val_loss: 2.7123 - val_acc: 0.3764 Epoch 115/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4405 - acc: 0.6176 - val_loss: 2.7120 - val_acc: 0.3840 Epoch 116/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4142 - acc: 0.6236 - val_loss: 2.7214 - val_acc: 0.3724 Epoch 117/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4241 - acc: 0.6220 - val_loss: 2.7305 - val_acc: 0.3771 Epoch 118/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3824 - acc: 0.6328 - val_loss: 2.7337 - val_acc: 0.3712 Epoch 119/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3769 - acc: 0.6300 - val_loss: 2.7193 - val_acc: 
0.3789 Epoch 120/200 300/300 [==============================] - 1s 3ms/step - loss: 1.4108 - acc: 0.6250 - val_loss: 2.7385 - val_acc: 0.3740 Epoch 121/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3384 - acc: 0.6435 - val_loss: 2.7799 - val_acc: 0.3682 Epoch 122/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3619 - acc: 0.6363 - val_loss: 2.7456 - val_acc: 0.3817 Epoch 123/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3416 - acc: 0.6386 - val_loss: 2.7436 - val_acc: 0.3807 Epoch 124/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3332 - acc: 0.6428 - val_loss: 2.7674 - val_acc: 0.3742 Epoch 125/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3322 - acc: 0.6415 - val_loss: 2.7651 - val_acc: 0.3740 Epoch 126/200 300/300 [==============================] - 1s 3ms/step - loss: 1.2773 - acc: 0.6563 - val_loss: 2.7769 - val_acc: 0.3761 Epoch 127/200 300/300 [==============================] - 1s 3ms/step - loss: 1.3079 - acc: 0.6511 - val_loss: 2.7822 - val_acc: 0.3745 Epoch 128/200 300/300 [==============================] - 1s 3ms/step - loss: 1.2857 - acc: 0.6563 - val_loss: 2.8036 - val_acc: 0.3761 Epoch 129/200 300/300 [==============================] - 1s 3ms/step - loss: 1.2577 - acc: 0.6594 - val_loss: 2.7968 - val_acc: 0.3745 Epoch 130/200 300/300 [==============================] - 1s 3ms/step - loss: 1.2753 - acc: 0.6586 - val_loss: 2.8037 - val_acc: 0.3738 Training time: 104.67989110946655
# Plot the learning curves recorded in the History object
plot_results(history.history)

# Evaluate model_improved1 on the test set; result is [loss, acc]
final_loss, final_acc = model_improved1.evaluate(X_test, y_test, verbose=0)
print('Val loss:', final_loss)
print('Val accuracy:', final_acc)
Val loss: 2.7119576930999756 Val accuracy: 0.3840000033378601
# Training model
# Fit model_improved1 on augmented batches streamed from the
# ImageDataGenerator, validating on the raw test set each epoch.
import time

# time.perf_counter() replaces time.time() for the elapsed-time measurement:
# it is monotonic and high-resolution, so the duration cannot go negative or
# jump if the wall clock is adjusted mid-run.
training_start = time.perf_counter()
history = model_improved1.fit(train_datagen.flow(X_train, y_train, batch_size=64),
                              steps_per_epoch=300,
                              epochs=200,
                              validation_data=(X_test, y_test),
                              verbose=1,
                              callbacks=[early_stopping])
training_stop = time.perf_counter()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
Epoch 1/200 100/100 [==============================] - 4s 33ms/step - loss: 4.5737 - acc: 0.0225 - val_loss: 4.5234 - val_acc: 0.0286 Epoch 2/200 100/100 [==============================] - 3s 30ms/step - loss: 4.4665 - acc: 0.0364 - val_loss: 4.4148 - val_acc: 0.0372 Epoch 3/200 100/100 [==============================] - 3s 30ms/step - loss: 4.3664 - acc: 0.0433 - val_loss: 4.3060 - val_acc: 0.0628 Epoch 4/200 100/100 [==============================] - 3s 31ms/step - loss: 4.2484 - acc: 0.0658 - val_loss: 4.1929 - val_acc: 0.0733 Epoch 5/200 100/100 [==============================] - 3s 31ms/step - loss: 4.1604 - acc: 0.0770 - val_loss: 4.1197 - val_acc: 0.0825 Epoch 6/200 100/100 [==============================] - 3s 31ms/step - loss: 4.0698 - acc: 0.0809 - val_loss: 4.0503 - val_acc: 0.0965 Epoch 7/200 100/100 [==============================] - 3s 30ms/step - loss: 3.9997 - acc: 0.0983 - val_loss: 3.9679 - val_acc: 0.1044 Epoch 8/200 100/100 [==============================] - 3s 29ms/step - loss: 3.9532 - acc: 0.1095 - val_loss: 3.9276 - val_acc: 0.1148 Epoch 9/200 100/100 [==============================] - 3s 30ms/step - loss: 3.8956 - acc: 0.1202 - val_loss: 3.8896 - val_acc: 0.1245 Epoch 10/200 100/100 [==============================] - 3s 30ms/step - loss: 3.8634 - acc: 0.1241 - val_loss: 3.8388 - val_acc: 0.1347 Epoch 11/200 100/100 [==============================] - 3s 30ms/step - loss: 3.8144 - acc: 0.1228 - val_loss: 3.7756 - val_acc: 0.1471 Epoch 12/200 100/100 [==============================] - 3s 31ms/step - loss: 3.7729 - acc: 0.1391 - val_loss: 3.7560 - val_acc: 0.1461 Epoch 13/200 100/100 [==============================] - 3s 30ms/step - loss: 3.7216 - acc: 0.1480 - val_loss: 3.7271 - val_acc: 0.1534 Epoch 14/200 100/100 [==============================] - 3s 30ms/step - loss: 3.6971 - acc: 0.1550 - val_loss: 3.6812 - val_acc: 0.1555 Epoch 15/200 100/100 [==============================] - 3s 30ms/step - loss: 3.6557 - acc: 0.1609 - val_loss: 3.6740 - 
val_acc: 0.1579 Epoch 16/200 100/100 [==============================] - 3s 30ms/step - loss: 3.6665 - acc: 0.1481 - val_loss: 3.6384 - val_acc: 0.1656 Epoch 17/200 100/100 [==============================] - 3s 30ms/step - loss: 3.6347 - acc: 0.1623 - val_loss: 3.6307 - val_acc: 0.1652 Epoch 18/200 100/100 [==============================] - 3s 30ms/step - loss: 3.6011 - acc: 0.1645 - val_loss: 3.6354 - val_acc: 0.1628 Epoch 19/200 100/100 [==============================] - 3s 29ms/step - loss: 3.5835 - acc: 0.1650 - val_loss: 3.5701 - val_acc: 0.1739 Epoch 20/200 100/100 [==============================] - 3s 30ms/step - loss: 3.5647 - acc: 0.1713 - val_loss: 3.5413 - val_acc: 0.1772 Epoch 21/200 100/100 [==============================] - 3s 30ms/step - loss: 3.5233 - acc: 0.1739 - val_loss: 3.5426 - val_acc: 0.1774 Epoch 22/200 100/100 [==============================] - 3s 30ms/step - loss: 3.5192 - acc: 0.1761 - val_loss: 3.4985 - val_acc: 0.1841 Epoch 23/200 100/100 [==============================] - 3s 30ms/step - loss: 3.4572 - acc: 0.1870 - val_loss: 3.4883 - val_acc: 0.1895 Epoch 24/200 100/100 [==============================] - 3s 29ms/step - loss: 3.4354 - acc: 0.1984 - val_loss: 3.4958 - val_acc: 0.1853 Epoch 25/200 100/100 [==============================] - 3s 30ms/step - loss: 3.4378 - acc: 0.1945 - val_loss: 3.4473 - val_acc: 0.1956 Epoch 26/200 100/100 [==============================] - 3s 30ms/step - loss: 3.4164 - acc: 0.1947 - val_loss: 3.4330 - val_acc: 0.1992 Epoch 27/200 100/100 [==============================] - 3s 30ms/step - loss: 3.3882 - acc: 0.2058 - val_loss: 3.4344 - val_acc: 0.1987 Epoch 28/200 100/100 [==============================] - 3s 30ms/step - loss: 3.4060 - acc: 0.2021 - val_loss: 3.3792 - val_acc: 0.2115 Epoch 29/200 100/100 [==============================] - 3s 30ms/step - loss: 3.3608 - acc: 0.2042 - val_loss: 3.3942 - val_acc: 0.2084 Epoch 30/200 100/100 [==============================] - 3s 30ms/step - loss: 3.3462 - acc: 
0.2105 - val_loss: 3.3746 - val_acc: 0.2101 Epoch 31/200 100/100 [==============================] - 3s 30ms/step - loss: 3.3329 - acc: 0.2130 - val_loss: 3.3480 - val_acc: 0.2163 Epoch 32/200 100/100 [==============================] - 3s 30ms/step - loss: 3.3197 - acc: 0.2184 - val_loss: 3.3340 - val_acc: 0.2146 Epoch 33/200 100/100 [==============================] - 3s 30ms/step - loss: 3.3223 - acc: 0.2056 - val_loss: 3.3524 - val_acc: 0.2147 Epoch 34/200 100/100 [==============================] - 3s 31ms/step - loss: 3.2855 - acc: 0.2178 - val_loss: 3.3352 - val_acc: 0.2170 Epoch 35/200 100/100 [==============================] - 3s 30ms/step - loss: 3.2892 - acc: 0.2191 - val_loss: 3.2919 - val_acc: 0.2203 Epoch 36/200 100/100 [==============================] - 3s 30ms/step - loss: 3.2497 - acc: 0.2292 - val_loss: 3.3394 - val_acc: 0.2150 Epoch 37/200 100/100 [==============================] - 3s 30ms/step - loss: 3.2430 - acc: 0.2236 - val_loss: 3.2921 - val_acc: 0.2253 Epoch 38/200 100/100 [==============================] - 3s 29ms/step - loss: 3.2591 - acc: 0.2211 - val_loss: 3.2674 - val_acc: 0.2254 Epoch 39/200 100/100 [==============================] - 3s 30ms/step - loss: 3.2212 - acc: 0.2220 - val_loss: 3.2481 - val_acc: 0.2310 Epoch 40/200 100/100 [==============================] - 3s 30ms/step - loss: 3.2308 - acc: 0.2317 - val_loss: 3.2488 - val_acc: 0.2326 Epoch 41/200 100/100 [==============================] - 3s 30ms/step - loss: 3.2051 - acc: 0.2356 - val_loss: 3.2730 - val_acc: 0.2286 Epoch 42/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1955 - acc: 0.2372 - val_loss: 3.2186 - val_acc: 0.2419 Epoch 43/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1886 - acc: 0.2350 - val_loss: 3.2260 - val_acc: 0.2373 Epoch 44/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1977 - acc: 0.2291 - val_loss: 3.1839 - val_acc: 0.2456 Epoch 45/200 100/100 [==============================] - 3s 
30ms/step - loss: 3.1471 - acc: 0.2427 - val_loss: 3.1885 - val_acc: 0.2431 Epoch 46/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1947 - acc: 0.2331 - val_loss: 3.2061 - val_acc: 0.2405 Epoch 47/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1572 - acc: 0.2411 - val_loss: 3.2066 - val_acc: 0.2411 Epoch 48/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1274 - acc: 0.2520 - val_loss: 3.1855 - val_acc: 0.2453 Epoch 49/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1432 - acc: 0.2369 - val_loss: 3.1657 - val_acc: 0.2484 Epoch 50/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1235 - acc: 0.2475 - val_loss: 3.1520 - val_acc: 0.2536 Epoch 51/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0899 - acc: 0.2527 - val_loss: 3.1372 - val_acc: 0.2534 Epoch 52/200 100/100 [==============================] - 3s 30ms/step - loss: 3.1047 - acc: 0.2569 - val_loss: 3.1726 - val_acc: 0.2495 Epoch 53/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0698 - acc: 0.2594 - val_loss: 3.1164 - val_acc: 0.2581 Epoch 54/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0949 - acc: 0.2544 - val_loss: 3.1289 - val_acc: 0.2518 Epoch 55/200 100/100 [==============================] - 3s 29ms/step - loss: 3.0196 - acc: 0.2703 - val_loss: 3.1227 - val_acc: 0.2534 Epoch 56/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0588 - acc: 0.2591 - val_loss: 3.1311 - val_acc: 0.2549 Epoch 57/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0384 - acc: 0.2656 - val_loss: 3.1178 - val_acc: 0.2558 Epoch 58/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0462 - acc: 0.2650 - val_loss: 3.0894 - val_acc: 0.2632 Epoch 59/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0647 - acc: 0.2537 - val_loss: 3.0894 - val_acc: 0.2662 Epoch 60/200 100/100 
[==============================] - 3s 30ms/step - loss: 3.0597 - acc: 0.2631 - val_loss: 3.0853 - val_acc: 0.2618 Epoch 61/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0585 - acc: 0.2623 - val_loss: 3.0654 - val_acc: 0.2702 Epoch 62/200 100/100 [==============================] - 3s 30ms/step - loss: 3.0490 - acc: 0.2650 - val_loss: 3.0741 - val_acc: 0.2664 Epoch 63/200 100/100 [==============================] - 3s 31ms/step - loss: 3.0117 - acc: 0.2678 - val_loss: 3.0593 - val_acc: 0.2704 Epoch 64/200 100/100 [==============================] - 3s 31ms/step - loss: 2.9896 - acc: 0.2714 - val_loss: 3.0743 - val_acc: 0.2643 Epoch 65/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9955 - acc: 0.2675 - val_loss: 3.0433 - val_acc: 0.2697 Epoch 66/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9735 - acc: 0.2717 - val_loss: 3.0613 - val_acc: 0.2658 Epoch 67/200 100/100 [==============================] - 3s 29ms/step - loss: 2.9870 - acc: 0.2724 - val_loss: 3.0576 - val_acc: 0.2648 Epoch 68/200 100/100 [==============================] - 3s 31ms/step - loss: 3.0100 - acc: 0.2664 - val_loss: 3.0332 - val_acc: 0.2735 Epoch 69/200 100/100 [==============================] - 3s 31ms/step - loss: 2.9766 - acc: 0.2761 - val_loss: 3.0337 - val_acc: 0.2690 Epoch 70/200 100/100 [==============================] - 3s 31ms/step - loss: 2.9609 - acc: 0.2731 - val_loss: 3.0192 - val_acc: 0.2737 Epoch 71/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9415 - acc: 0.2847 - val_loss: 3.0177 - val_acc: 0.2743 Epoch 72/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9564 - acc: 0.2777 - val_loss: 2.9988 - val_acc: 0.2810 Epoch 73/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9447 - acc: 0.2812 - val_loss: 2.9889 - val_acc: 0.2769 Epoch 74/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9457 - acc: 0.2763 - val_loss: 2.9941 - val_acc: 
0.2767 Epoch 75/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9066 - acc: 0.2872 - val_loss: 2.9869 - val_acc: 0.2770 Epoch 76/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9423 - acc: 0.2823 - val_loss: 2.9805 - val_acc: 0.2788 Epoch 77/200 100/100 [==============================] - 3s 29ms/step - loss: 2.9223 - acc: 0.2886 - val_loss: 2.9940 - val_acc: 0.2759 Epoch 78/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9196 - acc: 0.2898 - val_loss: 2.9786 - val_acc: 0.2806 Epoch 79/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8943 - acc: 0.2827 - val_loss: 3.0535 - val_acc: 0.2718 Epoch 80/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9057 - acc: 0.2867 - val_loss: 3.0168 - val_acc: 0.2740 Epoch 81/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9022 - acc: 0.2856 - val_loss: 2.9797 - val_acc: 0.2812 Epoch 82/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9224 - acc: 0.2808 - val_loss: 2.9753 - val_acc: 0.2858 Epoch 83/200 100/100 [==============================] - 3s 30ms/step - loss: 2.9004 - acc: 0.2836 - val_loss: 2.9623 - val_acc: 0.2896 Epoch 84/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8467 - acc: 0.2970 - val_loss: 2.9344 - val_acc: 0.2941 Epoch 85/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8828 - acc: 0.2995 - val_loss: 2.9492 - val_acc: 0.2951 Epoch 86/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8662 - acc: 0.2975 - val_loss: 2.9767 - val_acc: 0.2837 Epoch 87/200 100/100 [==============================] - 3s 29ms/step - loss: 2.8593 - acc: 0.2947 - val_loss: 2.9295 - val_acc: 0.2892 Epoch 88/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8607 - acc: 0.2937 - val_loss: 2.9384 - val_acc: 0.2944 Epoch 89/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8234 - acc: 0.3017 - 
val_loss: 2.9361 - val_acc: 0.2881 Epoch 90/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8300 - acc: 0.3019 - val_loss: 2.9790 - val_acc: 0.2855 Epoch 91/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8149 - acc: 0.2967 - val_loss: 2.9151 - val_acc: 0.2933 Epoch 92/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8111 - acc: 0.3012 - val_loss: 2.9541 - val_acc: 0.2907 Epoch 93/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7887 - acc: 0.3019 - val_loss: 2.9348 - val_acc: 0.2884 Epoch 94/200 100/100 [==============================] - 3s 31ms/step - loss: 2.8302 - acc: 0.2859 - val_loss: 2.9034 - val_acc: 0.2974 Epoch 95/200 100/100 [==============================] - 3s 31ms/step - loss: 2.8077 - acc: 0.3043 - val_loss: 2.8818 - val_acc: 0.2996 Epoch 96/200 100/100 [==============================] - 3s 31ms/step - loss: 2.7604 - acc: 0.3225 - val_loss: 2.8742 - val_acc: 0.3014 Epoch 97/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8226 - acc: 0.2961 - val_loss: 2.9231 - val_acc: 0.2960 Epoch 98/200 100/100 [==============================] - 3s 30ms/step - loss: 2.8007 - acc: 0.3114 - val_loss: 2.8865 - val_acc: 0.3009 Epoch 99/200 100/100 [==============================] - 3s 31ms/step - loss: 2.7955 - acc: 0.3005 - val_loss: 2.9263 - val_acc: 0.2935 Epoch 100/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7944 - acc: 0.3042 - val_loss: 2.8760 - val_acc: 0.3023 Epoch 101/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7575 - acc: 0.3161 - val_loss: 2.8910 - val_acc: 0.2986 Epoch 102/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7648 - acc: 0.3184 - val_loss: 2.8471 - val_acc: 0.3083 Epoch 103/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7672 - acc: 0.3111 - val_loss: 2.8598 - val_acc: 0.3049 Epoch 104/200 100/100 [==============================] - 3s 30ms/step 
- loss: 2.7289 - acc: 0.3120 - val_loss: 2.8639 - val_acc: 0.3020 Epoch 105/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7591 - acc: 0.3169 - val_loss: 2.8449 - val_acc: 0.3083 Epoch 106/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7467 - acc: 0.3080 - val_loss: 2.8284 - val_acc: 0.3121 Epoch 107/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7415 - acc: 0.3187 - val_loss: 2.8306 - val_acc: 0.3113 Epoch 108/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7429 - acc: 0.3106 - val_loss: 2.8632 - val_acc: 0.3052 Epoch 109/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7250 - acc: 0.3206 - val_loss: 2.8403 - val_acc: 0.3071 Epoch 110/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7151 - acc: 0.3184 - val_loss: 2.9121 - val_acc: 0.2908 Epoch 111/200 100/100 [==============================] - 3s 31ms/step - loss: 2.7563 - acc: 0.3227 - val_loss: 2.8868 - val_acc: 0.2970 Epoch 112/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7079 - acc: 0.3308 - val_loss: 2.8284 - val_acc: 0.3086 Epoch 113/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7030 - acc: 0.3264 - val_loss: 2.8192 - val_acc: 0.3170 Epoch 114/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6877 - acc: 0.3245 - val_loss: 2.8177 - val_acc: 0.3157 Epoch 115/200 100/100 [==============================] - 3s 30ms/step - loss: 2.7129 - acc: 0.3173 - val_loss: 2.8183 - val_acc: 0.3088 Epoch 116/200 100/100 [==============================] - 3s 31ms/step - loss: 2.7124 - acc: 0.3253 - val_loss: 2.8250 - val_acc: 0.3092 Epoch 117/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6753 - acc: 0.3217 - val_loss: 2.8327 - val_acc: 0.3114 Epoch 118/200 100/100 [==============================] - 3s 31ms/step - loss: 2.7017 - acc: 0.3292 - val_loss: 2.7890 - val_acc: 0.3182 Epoch 119/200 100/100 
[==============================] - 3s 30ms/step - loss: 2.6543 - acc: 0.3283 - val_loss: 2.8053 - val_acc: 0.3181 Epoch 120/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6547 - acc: 0.3427 - val_loss: 2.8152 - val_acc: 0.3151 Epoch 121/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6941 - acc: 0.3334 - val_loss: 2.8014 - val_acc: 0.3152 Epoch 122/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6757 - acc: 0.3392 - val_loss: 2.7949 - val_acc: 0.3196 Epoch 123/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6558 - acc: 0.3303 - val_loss: 2.8659 - val_acc: 0.3100 Epoch 124/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6323 - acc: 0.3411 - val_loss: 2.7664 - val_acc: 0.3220 Epoch 125/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6636 - acc: 0.3291 - val_loss: 2.7740 - val_acc: 0.3222 Epoch 126/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6710 - acc: 0.3338 - val_loss: 2.8298 - val_acc: 0.3150 Epoch 127/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6678 - acc: 0.3297 - val_loss: 2.7957 - val_acc: 0.3170 Epoch 128/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6502 - acc: 0.3452 - val_loss: 2.7694 - val_acc: 0.3231 Epoch 129/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6072 - acc: 0.3430 - val_loss: 2.7612 - val_acc: 0.3209 Epoch 130/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6227 - acc: 0.3381 - val_loss: 2.7757 - val_acc: 0.3243 Epoch 131/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6208 - acc: 0.3377 - val_loss: 2.7591 - val_acc: 0.3235 Epoch 132/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6244 - acc: 0.3397 - val_loss: 2.7417 - val_acc: 0.3249 Epoch 133/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6076 - acc: 0.3386 - val_loss: 2.7676 
- val_acc: 0.3212 Epoch 134/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6110 - acc: 0.3434 - val_loss: 2.7547 - val_acc: 0.3244 Epoch 135/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6412 - acc: 0.3392 - val_loss: 2.7643 - val_acc: 0.3214 Epoch 136/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5803 - acc: 0.3530 - val_loss: 2.7488 - val_acc: 0.3228 Epoch 137/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5965 - acc: 0.3462 - val_loss: 2.7573 - val_acc: 0.3242 Epoch 138/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6051 - acc: 0.3433 - val_loss: 2.7721 - val_acc: 0.3216 Epoch 139/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5710 - acc: 0.3478 - val_loss: 2.7525 - val_acc: 0.3249 Epoch 140/200 100/100 [==============================] - 3s 30ms/step - loss: 2.6089 - acc: 0.3466 - val_loss: 2.7539 - val_acc: 0.3270 Epoch 141/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5557 - acc: 0.3498 - val_loss: 2.7933 - val_acc: 0.3207 Epoch 142/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5702 - acc: 0.3569 - val_loss: 2.7857 - val_acc: 0.3193 Epoch 143/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5505 - acc: 0.3534 - val_loss: 2.7358 - val_acc: 0.3302 Epoch 144/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5747 - acc: 0.3455 - val_loss: 2.7247 - val_acc: 0.3317 Epoch 145/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5378 - acc: 0.3656 - val_loss: 2.7551 - val_acc: 0.3245 Epoch 146/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5541 - acc: 0.3592 - val_loss: 2.7397 - val_acc: 0.3299 Epoch 147/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5444 - acc: 0.3587 - val_loss: 2.7010 - val_acc: 0.3363 Epoch 148/200 100/100 [==============================] - 3s 29ms/step - loss: 
2.5409 - acc: 0.3533 - val_loss: 2.7436 - val_acc: 0.3307 Epoch 149/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5625 - acc: 0.3491 - val_loss: 2.7284 - val_acc: 0.3304 Epoch 150/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5308 - acc: 0.3628 - val_loss: 2.6977 - val_acc: 0.3374 Epoch 151/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5356 - acc: 0.3566 - val_loss: 2.7217 - val_acc: 0.3319 Epoch 152/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5631 - acc: 0.3575 - val_loss: 2.7576 - val_acc: 0.3287 Epoch 153/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5243 - acc: 0.3653 - val_loss: 2.6941 - val_acc: 0.3381 Epoch 154/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5348 - acc: 0.3602 - val_loss: 2.7201 - val_acc: 0.3319 Epoch 155/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5289 - acc: 0.3541 - val_loss: 2.6775 - val_acc: 0.3452 Epoch 156/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4635 - acc: 0.3677 - val_loss: 2.6574 - val_acc: 0.3435 Epoch 157/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5338 - acc: 0.3587 - val_loss: 2.7059 - val_acc: 0.3354 Epoch 158/200 100/100 [==============================] - 3s 30ms/step - loss: 2.5302 - acc: 0.3627 - val_loss: 2.7418 - val_acc: 0.3319 Epoch 159/200 100/100 [==============================] - 3s 29ms/step - loss: 2.5065 - acc: 0.3602 - val_loss: 2.6775 - val_acc: 0.3385 Epoch 160/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4786 - acc: 0.3723 - val_loss: 2.7159 - val_acc: 0.3327 Epoch 161/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4937 - acc: 0.3655 - val_loss: 2.6812 - val_acc: 0.3420 Epoch 162/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4731 - acc: 0.3666 - val_loss: 2.6755 - val_acc: 0.3433 Epoch 163/200 100/100 
[==============================] - 3s 29ms/step - loss: 2.4612 - acc: 0.3752 - val_loss: 2.6724 - val_acc: 0.3440 Epoch 164/200 100/100 [==============================] - 3s 29ms/step - loss: 2.4705 - acc: 0.3767 - val_loss: 2.6848 - val_acc: 0.3426 Epoch 165/200 100/100 [==============================] - 3s 29ms/step - loss: 2.4650 - acc: 0.3752 - val_loss: 2.6936 - val_acc: 0.3406 Epoch 166/200 100/100 [==============================] - 3s 29ms/step - loss: 2.4863 - acc: 0.3716 - val_loss: 2.6663 - val_acc: 0.3414 Epoch 167/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4900 - acc: 0.3725 - val_loss: 2.6782 - val_acc: 0.3439 Epoch 168/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4645 - acc: 0.3750 - val_loss: 2.6865 - val_acc: 0.3424 Epoch 169/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4714 - acc: 0.3669 - val_loss: 2.7274 - val_acc: 0.3317 Epoch 170/200 100/100 [==============================] - 3s 30ms/step - loss: 2.4413 - acc: 0.3803 - val_loss: 2.7026 - val_acc: 0.3374 Training time: 515.6730082035065
# Plot the training/validation curves collected by the fit() call above.
plot_results(history.history)

# Final evaluation on the held-out data (no progress output); report each
# metric alongside its label.
final_scores = model_improved1.evaluate(X_test, y_test, verbose=0)
for label, value in zip(('Val loss:', 'Val accuracy:'), final_scores):
    print(label, value)
Val loss: 2.67753529548645 Val accuracy: 0.3452000021934509
# Training model
# Fit model_improved1 on the augmented training set (X_train_aug/y_train_aug)
# streamed through the ImageDataGenerator, validating on (X_test, y_test)
# each epoch; early_stopping may terminate the run before epoch 200.
import time

# perf_counter() is monotonic and high-resolution, so the measured duration
# cannot be skewed by system clock adjustments (unlike time.time()).
training_start = time.perf_counter()
history = model_improved1.fit(
    train_datagen.flow(X_train_aug, y_train_aug, batch_size=64),
    steps_per_epoch=300,
    epochs=200,
    validation_data=(X_test, y_test),
    verbose=1,
    callbacks=[early_stopping],
)
training_stop = time.perf_counter()
training_time = training_stop - training_start  # elapsed seconds
print(f"Training time: {training_time}")
Epoch 1/200 300/300 [==============================] - 82s 224ms/step - loss: 4.4831 - acc: 0.0327 - val_loss: 4.2521 - val_acc: 0.0676 Epoch 2/200 300/300 [==============================] - 65s 215ms/step - loss: 4.0763 - acc: 0.0851 - val_loss: 3.9153 - val_acc: 0.1134 Epoch 3/200 300/300 [==============================] - 65s 215ms/step - loss: 3.8566 - acc: 0.1183 - val_loss: 3.7583 - val_acc: 0.1348 Epoch 4/200 300/300 [==============================] - 65s 217ms/step - loss: 3.7039 - acc: 0.1453 - val_loss: 3.6531 - val_acc: 0.1545 Epoch 5/200 300/300 [==============================] - 65s 215ms/step - loss: 3.6015 - acc: 0.1640 - val_loss: 3.5393 - val_acc: 0.1693 Epoch 6/200 300/300 [==============================] - 65s 215ms/step - loss: 3.5170 - acc: 0.1746 - val_loss: 3.4699 - val_acc: 0.1890 Epoch 7/200 300/300 [==============================] - 65s 215ms/step - loss: 3.4499 - acc: 0.1884 - val_loss: 3.4213 - val_acc: 0.1906 Epoch 8/200 300/300 [==============================] - 65s 217ms/step - loss: 3.3942 - acc: 0.1953 - val_loss: 3.3745 - val_acc: 0.2005 Epoch 9/200 300/300 [==============================] - 64s 214ms/step - loss: 3.3354 - acc: 0.2083 - val_loss: 3.3225 - val_acc: 0.2131 Epoch 10/200 300/300 [==============================] - 65s 216ms/step - loss: 3.3129 - acc: 0.2122 - val_loss: 3.2982 - val_acc: 0.2175 Epoch 11/200 300/300 [==============================] - 65s 215ms/step - loss: 3.2445 - acc: 0.2252 - val_loss: 3.2656 - val_acc: 0.2259 Epoch 12/200 300/300 [==============================] - 65s 215ms/step - loss: 3.2347 - acc: 0.2210 - val_loss: 3.2062 - val_acc: 0.2349 Epoch 13/200 300/300 [==============================] - 64s 214ms/step - loss: 3.1990 - acc: 0.2258 - val_loss: 3.2020 - val_acc: 0.2342 Epoch 14/200 300/300 [==============================] - 65s 215ms/step - loss: 3.1732 - acc: 0.2321 - val_loss: 3.1937 - val_acc: 0.2420 Epoch 15/200 300/300 [==============================] - 65s 215ms/step - loss: 3.1494 - 
acc: 0.2447 - val_loss: 3.1586 - val_acc: 0.2419 Epoch 16/200 300/300 [==============================] - 65s 216ms/step - loss: 3.1080 - acc: 0.2476 - val_loss: 3.1140 - val_acc: 0.2585 Epoch 17/200 300/300 [==============================] - 64s 214ms/step - loss: 3.0784 - acc: 0.2532 - val_loss: 3.1024 - val_acc: 0.2550 Epoch 18/200 300/300 [==============================] - 64s 213ms/step - loss: 3.0606 - acc: 0.2555 - val_loss: 3.1052 - val_acc: 0.2504 Epoch 19/200 300/300 [==============================] - 65s 216ms/step - loss: 3.0284 - acc: 0.2608 - val_loss: 3.0580 - val_acc: 0.2646 Epoch 20/200 300/300 [==============================] - 64s 215ms/step - loss: 3.0146 - acc: 0.2664 - val_loss: 3.0315 - val_acc: 0.2678 Epoch 21/200 300/300 [==============================] - 65s 215ms/step - loss: 2.9833 - acc: 0.2709 - val_loss: 3.0447 - val_acc: 0.2670 Epoch 22/200 300/300 [==============================] - 65s 216ms/step - loss: 2.9741 - acc: 0.2738 - val_loss: 2.9945 - val_acc: 0.2777 Epoch 23/200 300/300 [==============================] - 64s 214ms/step - loss: 2.9448 - acc: 0.2832 - val_loss: 2.9758 - val_acc: 0.2787 Epoch 24/200 300/300 [==============================] - 64s 214ms/step - loss: 2.9322 - acc: 0.2795 - val_loss: 2.9770 - val_acc: 0.2808 Epoch 25/200 300/300 [==============================] - 64s 214ms/step - loss: 2.8996 - acc: 0.2917 - val_loss: 2.9371 - val_acc: 0.2875 Epoch 26/200 300/300 [==============================] - 64s 214ms/step - loss: 2.8991 - acc: 0.2898 - val_loss: 2.9575 - val_acc: 0.2826 Epoch 27/200 300/300 [==============================] - 65s 215ms/step - loss: 2.8889 - acc: 0.2905 - val_loss: 2.9672 - val_acc: 0.2796 Epoch 28/200 300/300 [==============================] - 65s 215ms/step - loss: 2.8650 - acc: 0.2962 - val_loss: 2.9446 - val_acc: 0.2884 Epoch 29/200 300/300 [==============================] - 65s 215ms/step - loss: 2.8474 - acc: 0.2989 - val_loss: 2.8940 - val_acc: 0.2982 Epoch 30/200 300/300 
[==============================] - 64s 215ms/step - loss: 2.8289 - acc: 0.3022 - val_loss: 2.8911 - val_acc: 0.2946 Epoch 31/200 300/300 [==============================] - 64s 214ms/step - loss: 2.8147 - acc: 0.3023 - val_loss: 2.8790 - val_acc: 0.2948 Epoch 32/200 300/300 [==============================] - 65s 216ms/step - loss: 2.7967 - acc: 0.3104 - val_loss: 2.8790 - val_acc: 0.2983 Epoch 33/200 300/300 [==============================] - 65s 215ms/step - loss: 2.7807 - acc: 0.3125 - val_loss: 2.8600 - val_acc: 0.3013 Epoch 34/200 300/300 [==============================] - 65s 216ms/step - loss: 2.7539 - acc: 0.3189 - val_loss: 2.8491 - val_acc: 0.3052 Epoch 35/200 300/300 [==============================] - 65s 216ms/step - loss: 2.7415 - acc: 0.3213 - val_loss: 2.8170 - val_acc: 0.3052 Epoch 36/200 300/300 [==============================] - 64s 213ms/step - loss: 2.7239 - acc: 0.3233 - val_loss: 2.8283 - val_acc: 0.3022 Epoch 37/200 300/300 [==============================] - 65s 215ms/step - loss: 2.6948 - acc: 0.3311 - val_loss: 2.8597 - val_acc: 0.2986 Epoch 38/200 300/300 [==============================] - 65s 215ms/step - loss: 2.7069 - acc: 0.3248 - val_loss: 2.7863 - val_acc: 0.3152 Epoch 39/200 300/300 [==============================] - 64s 213ms/step - loss: 2.6988 - acc: 0.3253 - val_loss: 2.8039 - val_acc: 0.3068 Epoch 40/200 300/300 [==============================] - 66s 219ms/step - loss: 2.6705 - acc: 0.3319 - val_loss: 2.7915 - val_acc: 0.3135 Epoch 41/200 300/300 [==============================] - 64s 214ms/step - loss: 2.6532 - acc: 0.3346 - val_loss: 2.7743 - val_acc: 0.3200 Epoch 42/200 300/300 [==============================] - 65s 215ms/step - loss: 2.6448 - acc: 0.3374 - val_loss: 2.7327 - val_acc: 0.3261 Epoch 43/200 300/300 [==============================] - 65s 217ms/step - loss: 2.6342 - acc: 0.3412 - val_loss: 2.7473 - val_acc: 0.3266 Epoch 44/200 300/300 [==============================] - 65s 216ms/step - loss: 2.6174 - acc: 0.3436 - 
val_loss: 2.7529 - val_acc: 0.3224 Epoch 45/200 300/300 [==============================] - 64s 214ms/step - loss: 2.6120 - acc: 0.3458 - val_loss: 2.7456 - val_acc: 0.3206 Epoch 46/200 300/300 [==============================] - 64s 214ms/step - loss: 2.6176 - acc: 0.3450 - val_loss: 2.7235 - val_acc: 0.3247 Epoch 47/200 300/300 [==============================] - 65s 215ms/step - loss: 2.6047 - acc: 0.3479 - val_loss: 2.7129 - val_acc: 0.3242 Epoch 48/200 300/300 [==============================] - 65s 216ms/step - loss: 2.5878 - acc: 0.3539 - val_loss: 2.7359 - val_acc: 0.3279 Epoch 49/200 300/300 [==============================] - 65s 215ms/step - loss: 2.5795 - acc: 0.3477 - val_loss: 2.7085 - val_acc: 0.3337 Epoch 50/200 300/300 [==============================] - 64s 213ms/step - loss: 2.5546 - acc: 0.3583 - val_loss: 2.7237 - val_acc: 0.3269 Epoch 51/200 300/300 [==============================] - 65s 216ms/step - loss: 2.5431 - acc: 0.3571 - val_loss: 2.6915 - val_acc: 0.3306 Epoch 52/200 300/300 [==============================] - 65s 215ms/step - loss: 2.5337 - acc: 0.3624 - val_loss: 2.6881 - val_acc: 0.3324 Epoch 54/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4933 - acc: 0.3691 - val_loss: 2.6877 - val_acc: 0.3363 Epoch 55/200 300/300 [==============================] - 64s 214ms/step - loss: 2.4935 - acc: 0.3680 - val_loss: 2.6446 - val_acc: 0.3474 Epoch 56/200 300/300 [==============================] - 64s 214ms/step - loss: 2.4982 - acc: 0.3692 - val_loss: 2.6480 - val_acc: 0.3442 Epoch 57/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4904 - acc: 0.3724 - val_loss: 2.6963 - val_acc: 0.3370 Epoch 58/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4623 - acc: 0.3735 - val_loss: 2.6818 - val_acc: 0.3393 Epoch 59/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4621 - acc: 0.3757 - val_loss: 2.6494 - val_acc: 0.3431 Epoch 60/200 300/300 
[==============================] - 65s 216ms/step - loss: 2.4545 - acc: 0.3753 - val_loss: 2.6682 - val_acc: 0.3431 Epoch 61/200 300/300 [==============================] - 65s 216ms/step - loss: 2.4747 - acc: 0.3723 - val_loss: 2.6297 - val_acc: 0.3449 Epoch 62/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4294 - acc: 0.3845 - val_loss: 2.6482 - val_acc: 0.3472 Epoch 63/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4446 - acc: 0.3800 - val_loss: 2.6570 - val_acc: 0.3442 Epoch 64/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4112 - acc: 0.3891 - val_loss: 2.6142 - val_acc: 0.3582 Epoch 65/200 300/300 [==============================] - 64s 215ms/step - loss: 2.4214 - acc: 0.3848 - val_loss: 2.6149 - val_acc: 0.3533 Epoch 66/200 300/300 [==============================] - 64s 214ms/step - loss: 2.3945 - acc: 0.3854 - val_loss: 2.6902 - val_acc: 0.3394 Epoch 67/200 300/300 [==============================] - 65s 216ms/step - loss: 2.4043 - acc: 0.3834 - val_loss: 2.6198 - val_acc: 0.3505 Epoch 68/200 300/300 [==============================] - 65s 215ms/step - loss: 2.4035 - acc: 0.3901 - val_loss: 2.6048 - val_acc: 0.3549 Epoch 69/200 300/300 [==============================] - 65s 215ms/step - loss: 2.3732 - acc: 0.3893 - val_loss: 2.6136 - val_acc: 0.3568 Epoch 70/200 300/300 [==============================] - 64s 214ms/step - loss: 2.3699 - acc: 0.3943 - val_loss: 2.6211 - val_acc: 0.3484 Epoch 71/200 300/300 [==============================] - 64s 213ms/step - loss: 2.3618 - acc: 0.3988 - val_loss: 2.6500 - val_acc: 0.3504 Epoch 72/200 300/300 [==============================] - 65s 215ms/step - loss: 2.3761 - acc: 0.3965 - val_loss: 2.5732 - val_acc: 0.3635 Epoch 73/200 300/300 [==============================] - 64s 214ms/step - loss: 2.3573 - acc: 0.3968 - val_loss: 2.5966 - val_acc: 0.3560 Epoch 74/200 300/300 [==============================] - 65s 215ms/step - loss: 2.3474 - acc: 0.3967 - 
val_loss: 2.5969 - val_acc: 0.3537 Epoch 75/200 300/300 [==============================] - 64s 215ms/step - loss: 2.3394 - acc: 0.3991 - val_loss: 2.5690 - val_acc: 0.3614 Epoch 76/200 300/300 [==============================] - 65s 215ms/step - loss: 2.3304 - acc: 0.4060 - val_loss: 2.5800 - val_acc: 0.3563 Epoch 77/200 300/300 [==============================] - 64s 214ms/step - loss: 2.3066 - acc: 0.4046 - val_loss: 2.5884 - val_acc: 0.3634 Epoch 78/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2994 - acc: 0.4057 - val_loss: 2.5769 - val_acc: 0.3639 Epoch 79/200 300/300 [==============================] - 64s 214ms/step - loss: 2.3065 - acc: 0.4075 - val_loss: 2.5920 - val_acc: 0.3614 Epoch 80/200 300/300 [==============================] - 64s 213ms/step - loss: 2.2822 - acc: 0.4131 - val_loss: 2.5988 - val_acc: 0.3586 Epoch 81/200 300/300 [==============================] - 65s 215ms/step - loss: 2.2960 - acc: 0.4117 - val_loss: 2.5659 - val_acc: 0.3644 Epoch 82/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2864 - acc: 0.4130 - val_loss: 2.5762 - val_acc: 0.3655 Epoch 83/200 300/300 [==============================] - 66s 219ms/step - loss: 2.2840 - acc: 0.4102 - val_loss: 2.5623 - val_acc: 0.3695 Epoch 84/200 300/300 [==============================] - 65s 215ms/step - loss: 2.2971 - acc: 0.4089 - val_loss: 2.5476 - val_acc: 0.3720 Epoch 85/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2847 - acc: 0.4137 - val_loss: 2.5657 - val_acc: 0.3676 Epoch 86/200 300/300 [==============================] - 65s 215ms/step - loss: 2.2533 - acc: 0.4234 - val_loss: 2.5352 - val_acc: 0.3695 Epoch 87/200 300/300 [==============================] - 65s 215ms/step - loss: 2.2525 - acc: 0.4178 - val_loss: 2.5482 - val_acc: 0.3682 Epoch 88/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2648 - acc: 0.4191 - val_loss: 2.5395 - val_acc: 0.3684 Epoch 89/200 300/300 
[==============================] - 65s 215ms/step - loss: 2.2414 - acc: 0.4195 - val_loss: 2.5328 - val_acc: 0.3717 Epoch 90/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2434 - acc: 0.4220 - val_loss: 2.6450 - val_acc: 0.3516 Epoch 91/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2337 - acc: 0.4235 - val_loss: 2.5152 - val_acc: 0.3747 Epoch 92/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2440 - acc: 0.4226 - val_loss: 2.5529 - val_acc: 0.3696 Epoch 93/200 300/300 [==============================] - 64s 214ms/step - loss: 2.2312 - acc: 0.4281 - val_loss: 2.5170 - val_acc: 0.3735 Epoch 94/200 300/300 [==============================] - 64s 215ms/step - loss: 2.2209 - acc: 0.4266 - val_loss: 2.5460 - val_acc: 0.3739 Epoch 95/200 300/300 [==============================] - 65s 217ms/step - loss: 2.2162 - acc: 0.4244 - val_loss: 2.5342 - val_acc: 0.3732 Epoch 96/200 300/300 [==============================] - 65s 215ms/step - loss: 2.2028 - acc: 0.4294 - val_loss: 2.5185 - val_acc: 0.3730 Epoch 97/200 300/300 [==============================] - 65s 216ms/step - loss: 2.1851 - acc: 0.4366 - val_loss: 2.5368 - val_acc: 0.3677 Epoch 98/200 300/300 [==============================] - 64s 214ms/step - loss: 2.1898 - acc: 0.4314 - val_loss: 2.5328 - val_acc: 0.3732 Epoch 99/200 300/300 [==============================] - 65s 216ms/step - loss: 2.1973 - acc: 0.4308 - val_loss: 2.5084 - val_acc: 0.3751 Epoch 100/200 300/300 [==============================] - 65s 216ms/step - loss: 2.1940 - acc: 0.4318 - val_loss: 2.5648 - val_acc: 0.3684 Epoch 101/200 300/300 [==============================] - 65s 215ms/step - loss: 2.1710 - acc: 0.4360 - val_loss: 2.4939 - val_acc: 0.3846 Epoch 102/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1689 - acc: 0.4381 - val_loss: 2.5150 - val_acc: 0.3780 Epoch 103/200 300/300 [==============================] - 64s 215ms/step - loss: 2.1696 - acc: 
0.4403 - val_loss: 2.5207 - val_acc: 0.3767 Epoch 104/200 300/300 [==============================] - 65s 216ms/step - loss: 2.1609 - acc: 0.4365 - val_loss: 2.4836 - val_acc: 0.3874 Epoch 105/200 300/300 [==============================] - 65s 215ms/step - loss: 2.1666 - acc: 0.4367 - val_loss: 2.5120 - val_acc: 0.3810 Epoch 106/200 300/300 [==============================] - 65s 215ms/step - loss: 2.1536 - acc: 0.4406 - val_loss: 2.5123 - val_acc: 0.3791 Epoch 107/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1673 - acc: 0.4348 - val_loss: 2.5025 - val_acc: 0.3776 Epoch 108/200 300/300 [==============================] - 65s 215ms/step - loss: 2.1467 - acc: 0.4426 - val_loss: 2.4935 - val_acc: 0.3844 Epoch 109/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1683 - acc: 0.4388 - val_loss: 2.5106 - val_acc: 0.3806 Epoch 110/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1347 - acc: 0.4454 - val_loss: 2.4694 - val_acc: 0.3864 Epoch 111/200 300/300 [==============================] - 65s 215ms/step - loss: 2.1368 - acc: 0.4422 - val_loss: 2.5022 - val_acc: 0.3809 Epoch 112/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1443 - acc: 0.4458 - val_loss: 2.5060 - val_acc: 0.3781 Epoch 113/200 300/300 [==============================] - 64s 214ms/step - loss: 2.1154 - acc: 0.4503 - val_loss: 2.4690 - val_acc: 0.3883 Epoch 114/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1214 - acc: 0.4495 - val_loss: 2.4813 - val_acc: 0.3843 Epoch 115/200 300/300 [==============================] - 64s 213ms/step - loss: 2.1304 - acc: 0.4450 - val_loss: 2.5006 - val_acc: 0.3818 Epoch 116/200 300/300 [==============================] - 64s 215ms/step - loss: 2.1019 - acc: 0.4464 - val_loss: 2.4707 - val_acc: 0.3891 Epoch 117/200 300/300 [==============================] - 64s 214ms/step - loss: 2.1021 - acc: 0.4464 - val_loss: 2.4838 - val_acc: 0.3871 Epoch 118/200 300/300 
[==============================] - 64s 215ms/step - loss: 2.1082 - acc: 0.4482 - val_loss: 2.4900 - val_acc: 0.3877 Epoch 119/200 300/300 [==============================] - 65s 215ms/step - loss: 2.0943 - acc: 0.4530 - val_loss: 2.4806 - val_acc: 0.3823 Epoch 120/200 300/300 [==============================] - 64s 214ms/step - loss: 2.0915 - acc: 0.4492 - val_loss: 2.4774 - val_acc: 0.3873 Epoch 121/200 300/300 [==============================] - 64s 214ms/step - loss: 2.0776 - acc: 0.4608 - val_loss: 2.4594 - val_acc: 0.3915 Epoch 122/200 300/300 [==============================] - 65s 215ms/step - loss: 2.0648 - acc: 0.4613 - val_loss: 2.4878 - val_acc: 0.3864 Epoch 123/200 300/300 [==============================] - 64s 215ms/step - loss: 2.0915 - acc: 0.4542 - val_loss: 2.4712 - val_acc: 0.3885 Epoch 124/200 300/300 [==============================] - 65s 215ms/step - loss: 2.0755 - acc: 0.4591 - val_loss: 2.4909 - val_acc: 0.3847 Epoch 125/200 300/300 [==============================] - 64s 212ms/step - loss: 2.0695 - acc: 0.4606 - val_loss: 2.4502 - val_acc: 0.3898 Epoch 126/200 300/300 [==============================] - 64s 214ms/step - loss: 2.0543 - acc: 0.4673 - val_loss: 2.4562 - val_acc: 0.3916 Epoch 127/200 300/300 [==============================] - 64s 213ms/step - loss: 2.0535 - acc: 0.4601 - val_loss: 2.4838 - val_acc: 0.3841 Epoch 128/200 300/300 [==============================] - 65s 215ms/step - loss: 2.0587 - acc: 0.4627 - val_loss: 2.4620 - val_acc: 0.3928 Epoch 129/200 300/300 [==============================] - 64s 213ms/step - loss: 2.0324 - acc: 0.4691 - val_loss: 2.5038 - val_acc: 0.3819 Epoch 130/200 300/300 [==============================] - 65s 216ms/step - loss: 2.0557 - acc: 0.4655 - val_loss: 2.4510 - val_acc: 0.3939 Epoch 131/200 300/300 [==============================] - 64s 212ms/step - loss: 2.0306 - acc: 0.4674 - val_loss: 2.4457 - val_acc: 0.3978 Epoch 132/200 300/300 [==============================] - 64s 215ms/step - loss: 2.0093 - 
acc: 0.4746 - val_loss: 2.4731 - val_acc: 0.3945 Epoch 133/200 300/300 [==============================] - 64s 213ms/step - loss: 2.0172 - acc: 0.4699 - val_loss: 2.4493 - val_acc: 0.3967 Epoch 134/200 300/300 [==============================] - 64s 213ms/step - loss: 2.0239 - acc: 0.4732 - val_loss: 2.4764 - val_acc: 0.3936 Epoch 135/200 300/300 [==============================] - 65s 215ms/step - loss: 1.9999 - acc: 0.4734 - val_loss: 2.4809 - val_acc: 0.3912 Epoch 136/200 300/300 [==============================] - 64s 214ms/step - loss: 2.0337 - acc: 0.4647 - val_loss: 2.4727 - val_acc: 0.3920 Epoch 137/200 300/300 [==============================] - 65s 215ms/step - loss: 2.0110 - acc: 0.4727 - val_loss: 2.4341 - val_acc: 0.3962 Epoch 138/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9985 - acc: 0.4743 - val_loss: 2.4838 - val_acc: 0.3924 Epoch 139/200 300/300 [==============================] - 64s 213ms/step - loss: 2.0057 - acc: 0.4748 - val_loss: 2.4548 - val_acc: 0.3971 Epoch 140/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9971 - acc: 0.4760 - val_loss: 2.4842 - val_acc: 0.3944 Epoch 141/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9990 - acc: 0.4725 - val_loss: 2.4468 - val_acc: 0.3962 Epoch 142/200 300/300 [==============================] - 65s 216ms/step - loss: 1.9847 - acc: 0.4816 - val_loss: 2.4325 - val_acc: 0.3983 Epoch 143/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9959 - acc: 0.4739 - val_loss: 2.4473 - val_acc: 0.3966 Epoch 144/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9755 - acc: 0.4818 - val_loss: 2.4360 - val_acc: 0.4017 Epoch 145/200 300/300 [==============================] - 64s 214ms/step - loss: 2.0028 - acc: 0.4745 - val_loss: 2.4306 - val_acc: 0.4020 Epoch 146/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9685 - acc: 0.4823 - val_loss: 2.4374 - val_acc: 0.3972 Epoch 147/200 
300/300 [==============================] - 64s 213ms/step - loss: 1.9764 - acc: 0.4815 - val_loss: 2.4840 - val_acc: 0.3939 Epoch 148/200 300/300 [==============================] - 65s 217ms/step - loss: 1.9732 - acc: 0.4796 - val_loss: 2.4274 - val_acc: 0.4055 Epoch 149/200 300/300 [==============================] - 65s 215ms/step - loss: 1.9550 - acc: 0.4869 - val_loss: 2.4730 - val_acc: 0.3955 Epoch 150/200 300/300 [==============================] - 65s 215ms/step - loss: 1.9442 - acc: 0.4885 - val_loss: 2.4441 - val_acc: 0.3980 Epoch 151/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9600 - acc: 0.4842 - val_loss: 2.4191 - val_acc: 0.4065 Epoch 152/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9577 - acc: 0.4890 - val_loss: 2.4735 - val_acc: 0.3962 Epoch 153/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9691 - acc: 0.4845 - val_loss: 2.4795 - val_acc: 0.3897 Epoch 154/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9515 - acc: 0.4859 - val_loss: 2.4555 - val_acc: 0.3960 Epoch 155/200 300/300 [==============================] - 65s 216ms/step - loss: 1.9380 - acc: 0.4896 - val_loss: 2.4548 - val_acc: 0.3992 Epoch 156/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9275 - acc: 0.4892 - val_loss: 2.4562 - val_acc: 0.3972 Epoch 157/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9426 - acc: 0.4881 - val_loss: 2.4433 - val_acc: 0.3968 Epoch 158/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9459 - acc: 0.4848 - val_loss: 2.4199 - val_acc: 0.4027 Epoch 159/200 300/300 [==============================] - 64s 214ms/step - loss: 1.9183 - acc: 0.4918 - val_loss: 2.4228 - val_acc: 0.4060 Epoch 160/200 300/300 [==============================] - 65s 215ms/step - loss: 1.9393 - acc: 0.4879 - val_loss: 2.4893 - val_acc: 0.3924 Epoch 161/200 300/300 [==============================] - 64s 214ms/step - loss: 
1.9003 - acc: 0.4967 - val_loss: 2.4451 - val_acc: 0.4025 Epoch 162/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9349 - acc: 0.4919 - val_loss: 2.4235 - val_acc: 0.4048 Epoch 163/200 300/300 [==============================] - 64s 215ms/step - loss: 1.9212 - acc: 0.4956 - val_loss: 2.4495 - val_acc: 0.3953 Epoch 164/200 300/300 [==============================] - 64s 212ms/step - loss: 1.9121 - acc: 0.4972 - val_loss: 2.4276 - val_acc: 0.4028 Epoch 165/200 300/300 [==============================] - 64s 213ms/step - loss: 1.9120 - acc: 0.4928 - val_loss: 2.4624 - val_acc: 0.3970 Epoch 166/200 300/300 [==============================] - 65s 216ms/step - loss: 1.8920 - acc: 0.5012 - val_loss: 2.4282 - val_acc: 0.4010 Training time: 10749.689942836761
# Visualise the training/validation learning curves for this run
plot_results(history.history)

# Score the trained model on the held-out set, suppressing progress output
val_loss, val_acc = model_improved1.evaluate(X_test, y_test, verbose=0)
print('Val loss:', val_loss)
print('Val accuracy:', val_acc)
Val loss: 2.4191126823425293 Val accuracy: 0.4065000116825104
# Batch norm model 4: VGG-style CNN with BatchNormalization after every conv.
#
# Fix: the original imported from `keras.layers.convolutional`,
# `keras.layers.pooling`, and `keras.layers.core` — private module paths that
# are deprecated and removed in recent Keras releases. All layers are
# available from the public `tensorflow.keras.layers` namespace.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (Conv2D, MaxPool2D, Dense, Activation,
                                     Dropout, Flatten, BatchNormalization)
from tensorflow.keras.initializers import RandomNormal, Constant


def _add_conv_bn_relu(model, filters, **conv_kwargs):
    """Append one Conv2D(3x3, 'same' padding) -> BatchNorm -> ReLU stack."""
    model.add(Conv2D(filters, (3, 3), padding='same', **conv_kwargs))
    model.add(BatchNormalization())
    model.add(Activation('relu'))


model_improved2 = Sequential()

# Stage 1: two 256-filter conv blocks on the 32x32 RGB input, then downsample.
_add_conv_bn_relu(model_improved2, 256, input_shape=(32, 32, 3))
_add_conv_bn_relu(model_improved2, 256)
model_improved2.add(MaxPool2D(pool_size=(2, 2)))
model_improved2.add(Dropout(0.2))

# Stages 2-4: three identical stages of two 512-filter conv blocks, each
# followed by 2x2 max-pooling and Dropout(0.2). Spatial size shrinks
# 16 -> 8 -> 4 -> 2 across the stages.
for _ in range(3):
    _add_conv_bn_relu(model_improved2, 512)
    _add_conv_bn_relu(model_improved2, 512)
    model_improved2.add(MaxPool2D(pool_size=(2, 2)))
    model_improved2.add(Dropout(0.2))

# Classifier head: 1024-unit dense layer with ReLU and dropout, a
# BatchNormalization with custom momentum/epsilon and explicit beta/gamma
# initializers, then a 100-way softmax for the CIFAR-100 classes.
model_improved2.add(Flatten())
model_improved2.add(Dense(1024))
model_improved2.add(Activation('relu'))
model_improved2.add(Dropout(0.2))
model_improved2.add(BatchNormalization(momentum=0.95,
                                       epsilon=0.005,
                                       beta_initializer=RandomNormal(mean=0.0, stddev=0.05),
                                       gamma_initializer=Constant(value=0.9)))
model_improved2.add(Dense(100, activation='softmax'))
model_improved2.summary()
2022-11-25 12:50:09.647351: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags. 2022-11-25 12:50:09.653061: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:50:09.653221: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:50:09.653325: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:50:10.655841: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:50:10.656763: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:50:10.656886: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:981] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero 2022-11-25 12:50:10.657005: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1613] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 8035 MB memory: -> device: 0, name: NVIDIA GeForce RTX 3080, pci bus id: 0000:01:00.0, compute capability: 8.6
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 32, 32, 256) 7168
batch_normalization (BatchN (None, 32, 32, 256) 1024
ormalization)
activation (Activation) (None, 32, 32, 256) 0
conv2d_1 (Conv2D) (None, 32, 32, 256) 590080
batch_normalization_1 (Batc (None, 32, 32, 256) 1024
hNormalization)
activation_1 (Activation) (None, 32, 32, 256) 0
max_pooling2d (MaxPooling2D (None, 16, 16, 256) 0
)
dropout (Dropout) (None, 16, 16, 256) 0
conv2d_2 (Conv2D) (None, 16, 16, 512) 1180160
batch_normalization_2 (Batc (None, 16, 16, 512) 2048
hNormalization)
activation_2 (Activation) (None, 16, 16, 512) 0
conv2d_3 (Conv2D) (None, 16, 16, 512) 2359808
batch_normalization_3 (Batc (None, 16, 16, 512) 2048
hNormalization)
activation_3 (Activation) (None, 16, 16, 512) 0
max_pooling2d_1 (MaxPooling (None, 8, 8, 512) 0
2D)
dropout_1 (Dropout) (None, 8, 8, 512) 0
conv2d_4 (Conv2D) (None, 8, 8, 512) 2359808
batch_normalization_4 (Batc (None, 8, 8, 512) 2048
hNormalization)
activation_4 (Activation) (None, 8, 8, 512) 0
conv2d_5 (Conv2D) (None, 8, 8, 512) 2359808
batch_normalization_5 (Batc (None, 8, 8, 512) 2048
hNormalization)
activation_5 (Activation) (None, 8, 8, 512) 0
max_pooling2d_2 (MaxPooling (None, 4, 4, 512) 0
2D)
dropout_2 (Dropout) (None, 4, 4, 512) 0
conv2d_6 (Conv2D) (None, 4, 4, 512) 2359808
batch_normalization_6 (Batc (None, 4, 4, 512) 2048
hNormalization)
activation_6 (Activation) (None, 4, 4, 512) 0
conv2d_7 (Conv2D) (None, 4, 4, 512) 2359808
batch_normalization_7 (Batc (None, 4, 4, 512) 2048
hNormalization)
activation_7 (Activation) (None, 4, 4, 512) 0
max_pooling2d_3 (MaxPooling (None, 2, 2, 512) 0
2D)
dropout_3 (Dropout) (None, 2, 2, 512) 0
flatten (Flatten) (None, 2048) 0
dense (Dense) (None, 1024) 2098176
activation_8 (Activation) (None, 1024) 0
dropout_4 (Dropout) (None, 1024) 0
batch_normalization_8 (Batc (None, 1024) 4096
hNormalization)
dense_1 (Dense) (None, 100) 102500
=================================================================
Total params: 15,795,556
Trainable params: 15,786,340
Non-trainable params: 9,216
_________________________________________________________________
# Compile the model for training: categorical cross-entropy loss with an
# RMSprop optimiser (learning rate 1e-4), tracking accuracy as 'acc'.
from tensorflow.keras import optimizers

rmsprop_opt = optimizers.RMSprop(learning_rate=1e-4)
model_improved2.compile(optimizer=rmsprop_opt,
                        loss='categorical_crossentropy',
                        metrics=['acc'])
#Model 2 without Data augmentation
#steps_per_epoch= params divided by batch_size
# Training model on the raw (non-augmented) arrays.
# Fix: use time.perf_counter() for elapsed-time measurement — it is a
# monotonic clock, whereas time.time() can jump if the system clock is
# adjusted mid-run.
import time

# NOTE(review): steps_per_epoch=300 with batch_size=64 covers only 19,200
# samples per "epoch" — presumably intended to shorten epochs; confirm the
# training set size makes this deliberate.
training_start = time.perf_counter()
history = model_improved2.fit(X_train, y_train, batch_size=64, steps_per_epoch=300, epochs=200,
                              validation_data=(X_test, y_test),
                              verbose=1,
                              callbacks=[early_stopping])
training_stop = time.perf_counter()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
# Training model
# NOTE(review): stray duplicate of the cell above — `time` is already
# imported; this residue can be deleted.
import time
Epoch 1/200 300/300 [==============================] - 23s 63ms/step - loss: 4.3186 - acc: 0.0661 - val_loss: 5.0775 - val_acc: 0.0231 Epoch 2/200 300/300 [==============================] - 19s 62ms/step - loss: 3.7722 - acc: 0.1297 - val_loss: 3.9571 - val_acc: 0.1036 Epoch 3/200 300/300 [==============================] - 19s 64ms/step - loss: 3.3863 - acc: 0.1913 - val_loss: 3.2026 - val_acc: 0.2180 Epoch 4/200 300/300 [==============================] - 19s 63ms/step - loss: 3.0953 - acc: 0.2410 - val_loss: 3.0620 - val_acc: 0.2563 Epoch 5/200 300/300 [==============================] - 19s 64ms/step - loss: 2.8025 - acc: 0.2945 - val_loss: 2.7434 - val_acc: 0.3149 Epoch 6/200 300/300 [==============================] - 19s 63ms/step - loss: 2.5311 - acc: 0.3449 - val_loss: 2.3485 - val_acc: 0.3900 Epoch 7/200 300/300 [==============================] - 19s 63ms/step - loss: 2.3862 - acc: 0.3796 - val_loss: 2.4275 - val_acc: 0.3832 Epoch 8/200 300/300 [==============================] - 19s 63ms/step - loss: 2.2578 - acc: 0.4102 - val_loss: 2.1187 - val_acc: 0.4386 Epoch 9/200 300/300 [==============================] - 19s 62ms/step - loss: 2.0655 - acc: 0.4541 - val_loss: 2.1664 - val_acc: 0.4232 Epoch 10/200 300/300 [==============================] - 19s 62ms/step - loss: 2.0126 - acc: 0.4636 - val_loss: 2.0147 - val_acc: 0.4606 Epoch 11/200 300/300 [==============================] - 19s 62ms/step - loss: 1.8514 - acc: 0.5000 - val_loss: 2.0351 - val_acc: 0.4579 Epoch 12/200 300/300 [==============================] - 19s 62ms/step - loss: 1.7782 - acc: 0.5151 - val_loss: 1.8509 - val_acc: 0.5023 Epoch 13/200 300/300 [==============================] - 19s 62ms/step - loss: 1.7543 - acc: 0.5215 - val_loss: 1.9569 - val_acc: 0.4844 Epoch 14/200 300/300 [==============================] - 19s 62ms/step - loss: 1.5714 - acc: 0.5678 - val_loss: 1.9204 - val_acc: 0.4931 Epoch 15/200 300/300 [==============================] - 19s 62ms/step - loss: 1.5523 - acc: 0.5653 - 
val_loss: 1.8336 - val_acc: 0.5079 Epoch 16/200 300/300 [==============================] - 19s 62ms/step - loss: 1.4714 - acc: 0.5915 - val_loss: 1.8923 - val_acc: 0.4978 Epoch 17/200 300/300 [==============================] - 19s 62ms/step - loss: 1.3681 - acc: 0.6172 - val_loss: 1.7475 - val_acc: 0.5328 Epoch 18/200 300/300 [==============================] - 19s 62ms/step - loss: 1.3850 - acc: 0.6107 - val_loss: 1.9139 - val_acc: 0.4972 Epoch 19/200 300/300 [==============================] - 19s 62ms/step - loss: 1.2352 - acc: 0.6517 - val_loss: 1.6843 - val_acc: 0.5450 Epoch 20/200 300/300 [==============================] - 19s 62ms/step - loss: 1.2335 - acc: 0.6491 - val_loss: 1.7395 - val_acc: 0.5304 Epoch 21/200 300/300 [==============================] - 19s 62ms/step - loss: 1.1891 - acc: 0.6617 - val_loss: 1.6545 - val_acc: 0.5540 Epoch 22/200 300/300 [==============================] - 19s 64ms/step - loss: 1.0728 - acc: 0.6923 - val_loss: 1.6748 - val_acc: 0.5451 Epoch 23/200 300/300 [==============================] - 20s 66ms/step - loss: 1.0684 - acc: 0.6952 - val_loss: 1.6693 - val_acc: 0.5556 Epoch 24/200 300/300 [==============================] - 20s 66ms/step - loss: 1.0086 - acc: 0.7115 - val_loss: 1.5492 - val_acc: 0.5881 Epoch 25/200 300/300 [==============================] - 19s 64ms/step - loss: 0.9469 - acc: 0.7286 - val_loss: 1.5427 - val_acc: 0.5785 Epoch 26/200 300/300 [==============================] - 19s 62ms/step - loss: 0.9475 - acc: 0.7279 - val_loss: 1.4906 - val_acc: 0.5963 Epoch 27/200 300/300 [==============================] - 18s 61ms/step - loss: 0.8120 - acc: 0.7709 - val_loss: 1.6262 - val_acc: 0.5761 Epoch 28/200 300/300 [==============================] - 18s 62ms/step - loss: 0.8408 - acc: 0.7555 - val_loss: 1.5083 - val_acc: 0.5886 Epoch 29/200 300/300 [==============================] - 18s 61ms/step - loss: 0.8098 - acc: 0.7659 - val_loss: 1.5445 - val_acc: 0.5903 Epoch 30/200 300/300 [==============================] - 19s 
62ms/step - loss: 0.7136 - acc: 0.7901 - val_loss: 1.4976 - val_acc: 0.5980 Epoch 31/200 300/300 [==============================] - 18s 62ms/step - loss: 0.7387 - acc: 0.7806 - val_loss: 1.7597 - val_acc: 0.5626 Epoch 32/200 300/300 [==============================] - 19s 62ms/step - loss: 0.6542 - acc: 0.8095 - val_loss: 1.5068 - val_acc: 0.5983 Epoch 33/200 300/300 [==============================] - 18s 61ms/step - loss: 0.6278 - acc: 0.8160 - val_loss: 1.6237 - val_acc: 0.5884 Epoch 34/200 300/300 [==============================] - 18s 62ms/step - loss: 0.6461 - acc: 0.8076 - val_loss: 1.4451 - val_acc: 0.6131 Epoch 35/200 300/300 [==============================] - 19s 62ms/step - loss: 0.5352 - acc: 0.8455 - val_loss: 1.6393 - val_acc: 0.5825 Epoch 36/200 300/300 [==============================] - 18s 61ms/step - loss: 0.5662 - acc: 0.8323 - val_loss: 1.5349 - val_acc: 0.6043 Epoch 37/200 300/300 [==============================] - 18s 61ms/step - loss: 0.5146 - acc: 0.8492 - val_loss: 1.4864 - val_acc: 0.6041 Epoch 38/200 300/300 [==============================] - 19s 62ms/step - loss: 0.4640 - acc: 0.8653 - val_loss: 1.4442 - val_acc: 0.6218 Epoch 39/200 300/300 [==============================] - 19s 62ms/step - loss: 0.4901 - acc: 0.8557 - val_loss: 1.5593 - val_acc: 0.6097 Epoch 40/200 300/300 [==============================] - 19s 62ms/step - loss: 0.4020 - acc: 0.8864 - val_loss: 1.4866 - val_acc: 0.6220 Epoch 41/200 300/300 [==============================] - 19s 62ms/step - loss: 0.4137 - acc: 0.8790 - val_loss: 1.6361 - val_acc: 0.5972 Epoch 42/200 300/300 [==============================] - 18s 61ms/step - loss: 0.4114 - acc: 0.8766 - val_loss: 1.5126 - val_acc: 0.6106 Epoch 43/200 300/300 [==============================] - 18s 62ms/step - loss: 0.3464 - acc: 0.8994 - val_loss: 1.5637 - val_acc: 0.6109 Epoch 44/200 300/300 [==============================] - 19s 62ms/step - loss: 0.3735 - acc: 0.8907 - val_loss: 1.5108 - val_acc: 0.6224 Epoch 45/200 
300/300 [==============================] - 18s 62ms/step - loss: 0.3199 - acc: 0.9073 - val_loss: 1.8060 - val_acc: 0.5860 Epoch 46/200 300/300 [==============================] - 19s 62ms/step - loss: 0.3145 - acc: 0.9052 - val_loss: 1.4549 - val_acc: 0.6311 Epoch 47/200 300/300 [==============================] - 18s 61ms/step - loss: 0.3328 - acc: 0.8991 - val_loss: 1.5853 - val_acc: 0.6119 Epoch 48/200 300/300 [==============================] - 18s 62ms/step - loss: 0.2526 - acc: 0.9270 - val_loss: 1.6588 - val_acc: 0.6074 Epoch 49/200 300/300 [==============================] - 18s 62ms/step - loss: 0.2837 - acc: 0.9171 - val_loss: 1.8334 - val_acc: 0.5849 Epoch 50/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2698 - acc: 0.9194 - val_loss: 1.5015 - val_acc: 0.6351 Epoch 51/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2426 - acc: 0.9305 - val_loss: 1.5500 - val_acc: 0.6372 Epoch 52/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2665 - acc: 0.9214 - val_loss: 1.5449 - val_acc: 0.6262 Epoch 53/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2083 - acc: 0.9423 - val_loss: 1.5853 - val_acc: 0.6214 Epoch 54/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2279 - acc: 0.9321 - val_loss: 1.6375 - val_acc: 0.6174 Epoch 55/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2204 - acc: 0.9354 - val_loss: 1.6773 - val_acc: 0.6054 Epoch 56/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1914 - acc: 0.9452 - val_loss: 1.6327 - val_acc: 0.6204 Epoch 57/200 300/300 [==============================] - 19s 62ms/step - loss: 0.2063 - acc: 0.9381 - val_loss: 1.7848 - val_acc: 0.6082 Epoch 58/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1816 - acc: 0.9471 - val_loss: 1.6076 - val_acc: 0.6365 Epoch 59/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1798 - acc: 0.9474 - 
val_loss: 1.6445 - val_acc: 0.6282 Epoch 60/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1958 - acc: 0.9433 - val_loss: 1.6328 - val_acc: 0.6340 Epoch 61/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1523 - acc: 0.9562 - val_loss: 1.6459 - val_acc: 0.6275 Epoch 62/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1754 - acc: 0.9501 - val_loss: 1.8562 - val_acc: 0.6050 Epoch 63/200 300/300 [==============================] - 18s 61ms/step - loss: 0.1631 - acc: 0.9514 - val_loss: 1.6812 - val_acc: 0.6204 Epoch 64/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1528 - acc: 0.9569 - val_loss: 1.6955 - val_acc: 0.6209 Epoch 65/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1645 - acc: 0.9514 - val_loss: 1.6942 - val_acc: 0.6212 Epoch 66/200 300/300 [==============================] - 19s 62ms/step - loss: 0.1383 - acc: 0.9601 - val_loss: 1.7224 - val_acc: 0.6130 Training time: 1237.188719034195
# Visualise the training/validation learning curves for this run
plot_results(history.history)

# Score the trained model on the held-out set, suppressing progress output
val_loss, val_acc = model_improved2.evaluate(X_test, y_test, verbose=0)
print('Val loss:', val_loss)
print('Val accuracy:', val_acc)
Val loss: 1.5499601364135742 Val accuracy: 0.6371999979019165
# Training model on augmented batches streamed from train_datagen.flow.
# NOTE(review): model_improved2 was already trained in the cell above and is
# not re-initialised or re-built here, so this augmented run continues from
# the previously learned weights rather than starting fresh — confirm that
# is intended (the first-epoch loss in the log below suggests weights were
# reset in the actual session, so cells were likely re-run out of order).
import time
training_start = time.time()
history = model_improved2.fit(train_datagen.flow(X_train, y_train, batch_size=64),
steps_per_epoch=300,
epochs=200,
validation_data=(X_test, y_test),
verbose=1,
callbacks=[early_stopping])
training_stop = time.time()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
Epoch 1/200
2022-11-25 12:50:26.317012: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Size of values 0 does not match size of permutation 4 @ fanin shape insequential/dropout/dropout/SelectV2-2-TransposeNHWCToNCHW-LayoutOptimizer 2022-11-25 12:50:27.645774: I tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:428] Loaded cuDNN version 8100 2022-11-25 12:50:31.166508: I tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:630] TensorFloat-32 will be used for the matrix multiplication. This will only be logged once. 2022-11-25 12:50:31.448847: I tensorflow/compiler/xla/service/service.cc:173] XLA service 0x7f224dbb03f0 initialized for platform CUDA (this does not guarantee that XLA will be used). Devices: 2022-11-25 12:50:31.448869: I tensorflow/compiler/xla/service/service.cc:181] StreamExecutor device (0): NVIDIA GeForce RTX 3080, Compute Capability 8.6 2022-11-25 12:50:31.518262: I tensorflow/compiler/mlir/tensorflow/utils/dump_mlir_util.cc:268] disabling MLIR crash reproducer, set env var `MLIR_CRASH_REPRODUCER_DIRECTORY` to enable. 2022-11-25 12:50:31.949688: I tensorflow/compiler/jit/xla_compilation_cache.cc:477] Compiled cluster using XLA! This line is logged at most once for the lifetime of the process.
299/300 [============================>.] - ETA: 0s - loss: 4.3422 - acc: 0.0653
2022-11-25 12:50:45.300907: W tensorflow/tsl/framework/cpu_allocator_impl.cc:82] Allocation of 122880000 exceeds 10% of free system memory. 2022-11-25 12:50:45.371211: W tensorflow/tsl/framework/cpu_allocator_impl.cc:82] Allocation of 122880000 exceeds 10% of free system memory.
300/300 [==============================] - 23s 45ms/step - loss: 4.3404 - acc: 0.0655 - val_loss: 5.1879 - val_acc: 0.0249 Epoch 2/200 300/300 [==============================] - 13s 42ms/step - loss: 3.8042 - acc: 0.1287 - val_loss: 3.9017 - val_acc: 0.1158 Epoch 3/200 300/300 [==============================] - 13s 42ms/step - loss: 3.5032 - acc: 0.1733 - val_loss: 3.3748 - val_acc: 0.2057 Epoch 4/200 300/300 [==============================] - 13s 43ms/step - loss: 3.2342 - acc: 0.2153 - val_loss: 2.9647 - val_acc: 0.2678 Epoch 5/200 300/300 [==============================] - 13s 42ms/step - loss: 3.0210 - acc: 0.2578 - val_loss: 2.8166 - val_acc: 0.2918 Epoch 6/200 300/300 [==============================] - 13s 42ms/step - loss: 2.8187 - acc: 0.2949 - val_loss: 2.9012 - val_acc: 0.2832 Epoch 7/200 300/300 [==============================] - 13s 42ms/step - loss: 2.6784 - acc: 0.3163 - val_loss: 2.5791 - val_acc: 0.3448 Epoch 8/200 300/300 [==============================] - 13s 42ms/step - loss: 2.5028 - acc: 0.3556 - val_loss: 2.5499 - val_acc: 0.3521 Epoch 9/200 300/300 [==============================] - 13s 42ms/step - loss: 2.3951 - acc: 0.3786 - val_loss: 2.3692 - val_acc: 0.3871 Epoch 10/200 300/300 [==============================] - 13s 42ms/step - loss: 2.2907 - acc: 0.4031 - val_loss: 2.5567 - val_acc: 0.3744 Epoch 11/200 300/300 [==============================] - 13s 42ms/step - loss: 2.1808 - acc: 0.4240 - val_loss: 2.1797 - val_acc: 0.4353 Epoch 12/200 300/300 [==============================] - 13s 42ms/step - loss: 2.0914 - acc: 0.4437 - val_loss: 2.2436 - val_acc: 0.4152 Epoch 13/200 300/300 [==============================] - 13s 42ms/step - loss: 2.0094 - acc: 0.4604 - val_loss: 2.0576 - val_acc: 0.4615 Epoch 14/200 300/300 [==============================] - 13s 42ms/step - loss: 1.9353 - acc: 0.4812 - val_loss: 1.9376 - val_acc: 0.4817 Epoch 15/200 300/300 [==============================] - 13s 42ms/step - loss: 1.8913 - acc: 0.4915 - val_loss: 
2.0569 - val_acc: 0.4511 Epoch 16/200 300/300 [==============================] - 13s 42ms/step - loss: 1.8412 - acc: 0.5058 - val_loss: 1.9837 - val_acc: 0.4718 Epoch 17/200 300/300 [==============================] - 13s 42ms/step - loss: 1.7976 - acc: 0.5100 - val_loss: 1.8366 - val_acc: 0.5081 Epoch 18/200 300/300 [==============================] - 13s 42ms/step - loss: 1.7378 - acc: 0.5241 - val_loss: 1.7052 - val_acc: 0.5319 Epoch 19/200 300/300 [==============================] - 13s 42ms/step - loss: 1.6659 - acc: 0.5439 - val_loss: 1.7848 - val_acc: 0.5194 Epoch 20/200 300/300 [==============================] - 13s 42ms/step - loss: 1.6388 - acc: 0.5505 - val_loss: 1.7279 - val_acc: 0.5371 Epoch 21/200 300/300 [==============================] - 13s 42ms/step - loss: 1.5913 - acc: 0.5638 - val_loss: 2.0478 - val_acc: 0.4782 Epoch 22/200 300/300 [==============================] - 13s 42ms/step - loss: 1.5673 - acc: 0.5659 - val_loss: 1.7166 - val_acc: 0.5363 Epoch 23/200 300/300 [==============================] - 13s 42ms/step - loss: 1.5137 - acc: 0.5785 - val_loss: 1.7224 - val_acc: 0.5349 Epoch 24/200 300/300 [==============================] - 13s 42ms/step - loss: 1.4776 - acc: 0.5892 - val_loss: 1.6081 - val_acc: 0.5654 Epoch 25/200 300/300 [==============================] - 13s 42ms/step - loss: 1.4690 - acc: 0.5886 - val_loss: 1.8177 - val_acc: 0.5206 Epoch 26/200 300/300 [==============================] - 13s 42ms/step - loss: 1.4231 - acc: 0.6028 - val_loss: 1.7918 - val_acc: 0.5211 Epoch 27/200 300/300 [==============================] - 13s 42ms/step - loss: 1.3878 - acc: 0.6120 - val_loss: 1.6150 - val_acc: 0.5639 Epoch 28/200 300/300 [==============================] - 13s 42ms/step - loss: 1.3477 - acc: 0.6218 - val_loss: 1.6564 - val_acc: 0.5455 Epoch 29/200 300/300 [==============================] - 13s 42ms/step - loss: 1.3172 - acc: 0.6339 - val_loss: 1.6218 - val_acc: 0.5606 Epoch 30/200 300/300 [==============================] - 13s 42ms/step 
- loss: 1.3075 - acc: 0.6304 - val_loss: 1.5274 - val_acc: 0.5811 Epoch 31/200 300/300 [==============================] - 13s 42ms/step - loss: 1.2879 - acc: 0.6340 - val_loss: 1.4556 - val_acc: 0.5960 Epoch 32/200 300/300 [==============================] - 13s 42ms/step - loss: 1.2511 - acc: 0.6468 - val_loss: 1.5065 - val_acc: 0.5874 Epoch 33/200 300/300 [==============================] - 13s 42ms/step - loss: 1.2228 - acc: 0.6523 - val_loss: 1.7194 - val_acc: 0.5488 Epoch 34/200 300/300 [==============================] - 13s 42ms/step - loss: 1.1972 - acc: 0.6637 - val_loss: 1.5006 - val_acc: 0.5924 Epoch 35/200 300/300 [==============================] - 13s 42ms/step - loss: 1.1719 - acc: 0.6687 - val_loss: 1.5488 - val_acc: 0.5804 Epoch 36/200 300/300 [==============================] - 13s 42ms/step - loss: 1.1518 - acc: 0.6715 - val_loss: 1.5156 - val_acc: 0.5954 Epoch 37/200 300/300 [==============================] - 13s 42ms/step - loss: 1.1345 - acc: 0.6758 - val_loss: 1.4935 - val_acc: 0.5990 Epoch 38/200 300/300 [==============================] - 13s 42ms/step - loss: 1.1178 - acc: 0.6784 - val_loss: 1.5135 - val_acc: 0.5946 Epoch 39/200 300/300 [==============================] - 13s 42ms/step - loss: 1.0842 - acc: 0.6912 - val_loss: 1.5275 - val_acc: 0.5943 Epoch 40/200 300/300 [==============================] - 13s 42ms/step - loss: 1.0601 - acc: 0.6952 - val_loss: 1.4752 - val_acc: 0.5999 Epoch 41/200 300/300 [==============================] - 13s 42ms/step - loss: 1.0394 - acc: 0.6984 - val_loss: 1.4156 - val_acc: 0.6160 Epoch 42/200 300/300 [==============================] - 13s 42ms/step - loss: 1.0179 - acc: 0.7060 - val_loss: 1.4305 - val_acc: 0.6104 Epoch 43/200 300/300 [==============================] - 13s 42ms/step - loss: 0.9913 - acc: 0.7108 - val_loss: 1.4538 - val_acc: 0.6140 Epoch 44/200 300/300 [==============================] - 13s 42ms/step - loss: 0.9859 - acc: 0.7176 - val_loss: 1.4783 - val_acc: 0.6020 Epoch 45/200 300/300 
[==============================] - 13s 42ms/step - loss: 0.9843 - acc: 0.7168 - val_loss: 1.4897 - val_acc: 0.5968 Epoch 46/200 300/300 [==============================] - 13s 42ms/step - loss: 0.9714 - acc: 0.7175 - val_loss: 1.5109 - val_acc: 0.6033 Epoch 47/200 300/300 [==============================] - 13s 42ms/step - loss: 0.9392 - acc: 0.7265 - val_loss: 1.4063 - val_acc: 0.6240 Epoch 48/200 300/300 [==============================] - 13s 42ms/step - loss: 0.9195 - acc: 0.7303 - val_loss: 1.4345 - val_acc: 0.6155 Epoch 49/200 300/300 [==============================] - 13s 42ms/step - loss: 0.9116 - acc: 0.7329 - val_loss: 1.4141 - val_acc: 0.6227 Epoch 50/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8750 - acc: 0.7443 - val_loss: 1.3688 - val_acc: 0.6272 Epoch 51/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8850 - acc: 0.7416 - val_loss: 1.4307 - val_acc: 0.6213 Epoch 52/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8629 - acc: 0.7479 - val_loss: 1.3613 - val_acc: 0.6313 Epoch 53/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8436 - acc: 0.7523 - val_loss: 1.3369 - val_acc: 0.6382 Epoch 54/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8242 - acc: 0.7589 - val_loss: 1.3870 - val_acc: 0.6340 Epoch 55/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8192 - acc: 0.7581 - val_loss: 1.3470 - val_acc: 0.6383 Epoch 56/200 300/300 [==============================] - 13s 42ms/step - loss: 0.8056 - acc: 0.7622 - val_loss: 1.3533 - val_acc: 0.6345 Epoch 57/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7992 - acc: 0.7672 - val_loss: 1.4098 - val_acc: 0.6266 Epoch 58/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7876 - acc: 0.7669 - val_loss: 1.3585 - val_acc: 0.6383 Epoch 59/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7547 - acc: 0.7771 - val_loss: 
1.3877 - val_acc: 0.6398 Epoch 60/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7499 - acc: 0.7751 - val_loss: 1.3180 - val_acc: 0.6468 Epoch 61/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7267 - acc: 0.7844 - val_loss: 1.4469 - val_acc: 0.6190 Epoch 62/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7243 - acc: 0.7855 - val_loss: 1.3896 - val_acc: 0.6321 Epoch 63/200 300/300 [==============================] - 13s 42ms/step - loss: 0.7143 - acc: 0.7871 - val_loss: 1.4260 - val_acc: 0.6338 Epoch 64/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6854 - acc: 0.7937 - val_loss: 1.4950 - val_acc: 0.6221 Epoch 65/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6732 - acc: 0.8006 - val_loss: 1.3641 - val_acc: 0.6438 Epoch 66/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6712 - acc: 0.8028 - val_loss: 1.3525 - val_acc: 0.6400 Epoch 67/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6613 - acc: 0.8009 - val_loss: 1.2955 - val_acc: 0.6561 Epoch 68/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6452 - acc: 0.8076 - val_loss: 1.3721 - val_acc: 0.6447 Epoch 69/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6385 - acc: 0.8071 - val_loss: 1.3353 - val_acc: 0.6522 Epoch 70/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6386 - acc: 0.8111 - val_loss: 1.3809 - val_acc: 0.6445 Epoch 71/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6342 - acc: 0.8090 - val_loss: 1.4633 - val_acc: 0.6329 Epoch 72/200 300/300 [==============================] - 13s 42ms/step - loss: 0.6095 - acc: 0.8166 - val_loss: 1.3778 - val_acc: 0.6503 Epoch 73/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5953 - acc: 0.8228 - val_loss: 1.3826 - val_acc: 0.6464 Epoch 74/200 300/300 [==============================] - 13s 42ms/step 
- loss: 0.5983 - acc: 0.8194 - val_loss: 1.4473 - val_acc: 0.6365 Epoch 75/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5798 - acc: 0.8277 - val_loss: 1.2825 - val_acc: 0.6672 Epoch 76/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5667 - acc: 0.8317 - val_loss: 1.3145 - val_acc: 0.6624 Epoch 77/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5486 - acc: 0.8330 - val_loss: 1.3977 - val_acc: 0.6445 Epoch 78/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5413 - acc: 0.8362 - val_loss: 1.3760 - val_acc: 0.6562 Epoch 79/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5361 - acc: 0.8381 - val_loss: 1.3402 - val_acc: 0.6553 Epoch 80/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5357 - acc: 0.8375 - val_loss: 1.3408 - val_acc: 0.6523 Epoch 81/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5263 - acc: 0.8419 - val_loss: 1.3465 - val_acc: 0.6677 Epoch 82/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5198 - acc: 0.8429 - val_loss: 1.3397 - val_acc: 0.6562 Epoch 83/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5204 - acc: 0.8430 - val_loss: 1.3172 - val_acc: 0.6642 Epoch 84/200 300/300 [==============================] - 13s 42ms/step - loss: 0.5126 - acc: 0.8456 - val_loss: 1.3172 - val_acc: 0.6681 Epoch 85/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4968 - acc: 0.8501 - val_loss: 1.3371 - val_acc: 0.6620 Epoch 86/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4912 - acc: 0.8506 - val_loss: 1.5556 - val_acc: 0.6228 Epoch 87/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4726 - acc: 0.8551 - val_loss: 1.4124 - val_acc: 0.6495 Epoch 88/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4602 - acc: 0.8591 - val_loss: 1.2731 - val_acc: 0.6810 Epoch 89/200 300/300 
[==============================] - 13s 42ms/step - loss: 0.4587 - acc: 0.8606 - val_loss: 1.3930 - val_acc: 0.6581 Epoch 90/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4463 - acc: 0.8624 - val_loss: 1.3797 - val_acc: 0.6511 Epoch 91/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4446 - acc: 0.8671 - val_loss: 1.3647 - val_acc: 0.6684 Epoch 92/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4444 - acc: 0.8648 - val_loss: 1.3891 - val_acc: 0.6578 Epoch 93/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4417 - acc: 0.8666 - val_loss: 1.3144 - val_acc: 0.6685 Epoch 94/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4291 - acc: 0.8697 - val_loss: 1.4056 - val_acc: 0.6528 Epoch 95/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4138 - acc: 0.8716 - val_loss: 1.3712 - val_acc: 0.6618 Epoch 96/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4200 - acc: 0.8706 - val_loss: 1.3546 - val_acc: 0.6652 Epoch 97/200 300/300 [==============================] - 13s 42ms/step - loss: 0.4070 - acc: 0.8742 - val_loss: 1.3440 - val_acc: 0.6677 Epoch 98/200 300/300 [==============================] - 13s 42ms/step - loss: 0.3930 - acc: 0.8807 - val_loss: 1.3089 - val_acc: 0.6741 Epoch 99/200 300/300 [==============================] - 13s 42ms/step - loss: 0.3977 - acc: 0.8767 - val_loss: 1.3749 - val_acc: 0.6700 Epoch 100/200 300/300 [==============================] - 13s 42ms/step - loss: 0.3877 - acc: 0.8807 - val_loss: 1.4365 - val_acc: 0.6520 Epoch 101/200 300/300 [==============================] - 13s 42ms/step - loss: 0.3762 - acc: 0.8835 - val_loss: 1.3567 - val_acc: 0.6732 Epoch 102/200 300/300 [==============================] - 13s 42ms/step - loss: 0.3727 - acc: 0.8857 - val_loss: 1.3324 - val_acc: 0.6778 Epoch 103/200 300/300 [==============================] - 13s 42ms/step - loss: 0.3717 - acc: 0.8848 - val_loss: 
1.3656 - val_acc: 0.6724 Training time: 1318.0679969787598
# Visualise the learning curves recorded during this training run.
plot_results(history.history)

# Evaluate silently on the held-out test set and report loss/accuracy.
val_loss, val_acc = model_improved2.evaluate(X_test, y_test, verbose=0)
print('Val loss:', val_loss)
print('Val accuracy:', val_acc)
2022-11-25 13:30:14.075135: W tensorflow/tsl/framework/cpu_allocator_impl.cc:82] Allocation of 122880000 exceeds 10% of free system memory. 2022-11-25 13:30:14.352388: W tensorflow/tsl/framework/cpu_allocator_impl.cc:82] Allocation of 122880000 exceeds 10% of free system memory.
Val loss: 1.2731174230575562 Val accuracy: 0.6809999942779541
The model severely overfits the training data, which causes the test accuracy to be much lower than the training accuracy.
# Train the improved model on the augmented training set (X_train_aug /
# y_train_aug), validating against the test set each epoch and timing the run.
import time

# perf_counter() is a monotonic clock intended for measuring elapsed time;
# time.time() can jump if the system clock is adjusted mid-run.
training_start = time.perf_counter()
history = model_improved2.fit(
    train_datagen.flow(X_train_aug, y_train_aug, batch_size=64),
    steps_per_epoch=300,
    epochs=200,
    # NOTE(review): the test set is reused as the validation set here —
    # confirm a separate validation split is not intended.
    validation_data=(X_test, y_test),
    verbose=1,
    callbacks=[early_stopping],  # halts training once val metrics stop improving
)
training_stop = time.perf_counter()
training_time = training_stop - training_start
print(f"Training time: {training_time}")
Epoch 1/200 300/300 [==============================] - 21s 66ms/step - loss: 4.3628 - acc: 0.0654 - val_loss: 5.2424 - val_acc: 0.0288 Epoch 2/200 300/300 [==============================] - 19s 64ms/step - loss: 3.8169 - acc: 0.1221 - val_loss: 3.9006 - val_acc: 0.1089 Epoch 3/200 300/300 [==============================] - 19s 64ms/step - loss: 3.5074 - acc: 0.1716 - val_loss: 3.4101 - val_acc: 0.1976 Epoch 4/200 300/300 [==============================] - 19s 64ms/step - loss: 3.2514 - acc: 0.2124 - val_loss: 3.3231 - val_acc: 0.2075 Epoch 5/200 300/300 [==============================] - 19s 64ms/step - loss: 3.0291 - acc: 0.2546 - val_loss: 2.8923 - val_acc: 0.2897 Epoch 6/200 300/300 [==============================] - 19s 64ms/step - loss: 2.8501 - acc: 0.2840 - val_loss: 2.9810 - val_acc: 0.2752 Epoch 7/200 300/300 [==============================] - 19s 64ms/step - loss: 2.7071 - acc: 0.3150 - val_loss: 2.9559 - val_acc: 0.2877 Epoch 8/200 300/300 [==============================] - 19s 64ms/step - loss: 2.5628 - acc: 0.3438 - val_loss: 2.6304 - val_acc: 0.3415 Epoch 9/200 300/300 [==============================] - 19s 64ms/step - loss: 2.4501 - acc: 0.3656 - val_loss: 2.3427 - val_acc: 0.3899 Epoch 10/200 300/300 [==============================] - 19s 64ms/step - loss: 2.3404 - acc: 0.3867 - val_loss: 2.1739 - val_acc: 0.4307 Epoch 11/200 300/300 [==============================] - 19s 64ms/step - loss: 2.2523 - acc: 0.4100 - val_loss: 2.2889 - val_acc: 0.4016 Epoch 12/200 300/300 [==============================] - 19s 64ms/step - loss: 2.1902 - acc: 0.4236 - val_loss: 2.1647 - val_acc: 0.4325 Epoch 13/200 300/300 [==============================] - 19s 64ms/step - loss: 2.0967 - acc: 0.4403 - val_loss: 2.1929 - val_acc: 0.4330 Epoch 14/200 300/300 [==============================] - 19s 64ms/step - loss: 2.0274 - acc: 0.4540 - val_loss: 2.0296 - val_acc: 0.4645 Epoch 15/200 300/300 [==============================] - 19s 64ms/step - loss: 1.9754 - acc: 0.4727 - 
val_loss: 2.0476 - val_acc: 0.4629 Epoch 16/200 300/300 [==============================] - 19s 64ms/step - loss: 1.9129 - acc: 0.4884 - val_loss: 1.8769 - val_acc: 0.4976 Epoch 17/200 300/300 [==============================] - 19s 64ms/step - loss: 1.8592 - acc: 0.5007 - val_loss: 1.9339 - val_acc: 0.4873 Epoch 18/200 300/300 [==============================] - 19s 64ms/step - loss: 1.7985 - acc: 0.5117 - val_loss: 1.8624 - val_acc: 0.4985 Epoch 19/200 300/300 [==============================] - 19s 64ms/step - loss: 1.7494 - acc: 0.5276 - val_loss: 1.8353 - val_acc: 0.5107 Epoch 20/200 300/300 [==============================] - 19s 64ms/step - loss: 1.6890 - acc: 0.5401 - val_loss: 1.8908 - val_acc: 0.5024 Epoch 21/200 300/300 [==============================] - 19s 64ms/step - loss: 1.6779 - acc: 0.5445 - val_loss: 1.8334 - val_acc: 0.5141 Epoch 22/200 300/300 [==============================] - 19s 64ms/step - loss: 1.6469 - acc: 0.5521 - val_loss: 1.8807 - val_acc: 0.5015 Epoch 23/200 300/300 [==============================] - 19s 64ms/step - loss: 1.6238 - acc: 0.5558 - val_loss: 1.7235 - val_acc: 0.5364 Epoch 24/200 300/300 [==============================] - 19s 64ms/step - loss: 1.5565 - acc: 0.5695 - val_loss: 1.7095 - val_acc: 0.5406 Epoch 25/200 300/300 [==============================] - 19s 64ms/step - loss: 1.5255 - acc: 0.5780 - val_loss: 1.5889 - val_acc: 0.5662 Epoch 26/200 300/300 [==============================] - 19s 64ms/step - loss: 1.4953 - acc: 0.5837 - val_loss: 1.9444 - val_acc: 0.4938 Epoch 27/200 300/300 [==============================] - 19s 65ms/step - loss: 1.4460 - acc: 0.5989 - val_loss: 1.6108 - val_acc: 0.5607 Epoch 28/200 300/300 [==============================] - 19s 65ms/step - loss: 1.4293 - acc: 0.6036 - val_loss: 1.5103 - val_acc: 0.5843 Epoch 29/200 300/300 [==============================] - 19s 64ms/step - loss: 1.4011 - acc: 0.6052 - val_loss: 1.6046 - val_acc: 0.5652 Epoch 30/200 300/300 [==============================] - 19s 
64ms/step - loss: 1.3864 - acc: 0.6146 - val_loss: 1.5406 - val_acc: 0.5811 Epoch 31/200 300/300 [==============================] - 19s 64ms/step - loss: 1.3398 - acc: 0.6258 - val_loss: 1.6974 - val_acc: 0.5554 Epoch 32/200 300/300 [==============================] - 19s 64ms/step - loss: 1.3156 - acc: 0.6320 - val_loss: 1.5583 - val_acc: 0.5734 Epoch 33/200 300/300 [==============================] - 19s 65ms/step - loss: 1.3093 - acc: 0.6327 - val_loss: 1.5175 - val_acc: 0.5870 Epoch 34/200 300/300 [==============================] - 19s 64ms/step - loss: 1.2612 - acc: 0.6431 - val_loss: 1.5971 - val_acc: 0.5643 Epoch 35/200 300/300 [==============================] - 19s 64ms/step - loss: 1.2666 - acc: 0.6415 - val_loss: 1.4649 - val_acc: 0.6019 Epoch 36/200 300/300 [==============================] - 19s 64ms/step - loss: 1.2103 - acc: 0.6565 - val_loss: 1.4889 - val_acc: 0.5881 Epoch 37/200 300/300 [==============================] - 19s 64ms/step - loss: 1.2095 - acc: 0.6567 - val_loss: 1.4628 - val_acc: 0.6031 Epoch 38/200 300/300 [==============================] - 19s 64ms/step - loss: 1.1797 - acc: 0.6677 - val_loss: 1.5035 - val_acc: 0.5929 Epoch 39/200 300/300 [==============================] - 19s 64ms/step - loss: 1.1610 - acc: 0.6714 - val_loss: 1.5420 - val_acc: 0.5923 Epoch 40/200 300/300 [==============================] - 19s 64ms/step - loss: 1.1286 - acc: 0.6748 - val_loss: 1.4932 - val_acc: 0.5975 Epoch 41/200 300/300 [==============================] - 19s 64ms/step - loss: 1.1201 - acc: 0.6792 - val_loss: 1.5317 - val_acc: 0.5830 Epoch 42/200 300/300 [==============================] - 19s 64ms/step - loss: 1.1139 - acc: 0.6801 - val_loss: 1.5039 - val_acc: 0.5965 Epoch 43/200 300/300 [==============================] - 19s 64ms/step - loss: 1.0825 - acc: 0.6910 - val_loss: 1.4503 - val_acc: 0.6097 Epoch 44/200 300/300 [==============================] - 19s 64ms/step - loss: 1.0772 - acc: 0.6915 - val_loss: 1.4906 - val_acc: 0.6004 Epoch 45/200 
300/300 [==============================] - 19s 64ms/step - loss: 1.0516 - acc: 0.7013 - val_loss: 1.4709 - val_acc: 0.6099 Epoch 46/200 300/300 [==============================] - 19s 64ms/step - loss: 1.0333 - acc: 0.7006 - val_loss: 1.5692 - val_acc: 0.5912 Epoch 47/200 300/300 [==============================] - 19s 65ms/step - loss: 1.0072 - acc: 0.7087 - val_loss: 1.4352 - val_acc: 0.6135 Epoch 48/200 300/300 [==============================] - 19s 64ms/step - loss: 0.9948 - acc: 0.7132 - val_loss: 1.4640 - val_acc: 0.6032 Epoch 49/200 300/300 [==============================] - 19s 65ms/step - loss: 0.9689 - acc: 0.7193 - val_loss: 1.4158 - val_acc: 0.6205 Epoch 50/200 300/300 [==============================] - 19s 65ms/step - loss: 0.9713 - acc: 0.7205 - val_loss: 1.3490 - val_acc: 0.6282 Epoch 51/200 300/300 [==============================] - 20s 65ms/step - loss: 0.9600 - acc: 0.7253 - val_loss: 1.3885 - val_acc: 0.6248 Epoch 52/200 300/300 [==============================] - 20s 66ms/step - loss: 0.9267 - acc: 0.7306 - val_loss: 1.3704 - val_acc: 0.6340 Epoch 53/200 300/300 [==============================] - 19s 65ms/step - loss: 0.9165 - acc: 0.7352 - val_loss: 1.5241 - val_acc: 0.6086 Epoch 54/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8948 - acc: 0.7417 - val_loss: 1.4158 - val_acc: 0.6216 Epoch 55/200 300/300 [==============================] - 19s 65ms/step - loss: 0.8681 - acc: 0.7466 - val_loss: 1.3422 - val_acc: 0.6360 Epoch 56/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8759 - acc: 0.7435 - val_loss: 1.4220 - val_acc: 0.6253 Epoch 57/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8508 - acc: 0.7519 - val_loss: 1.3671 - val_acc: 0.6320 Epoch 58/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8401 - acc: 0.7539 - val_loss: 1.3270 - val_acc: 0.6442 Epoch 59/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8293 - acc: 0.7581 - 
val_loss: 1.3308 - val_acc: 0.6384 Epoch 60/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8228 - acc: 0.7597 - val_loss: 1.3555 - val_acc: 0.6411 Epoch 61/200 300/300 [==============================] - 19s 64ms/step - loss: 0.8075 - acc: 0.7618 - val_loss: 1.4964 - val_acc: 0.6080 Epoch 62/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7950 - acc: 0.7673 - val_loss: 1.4309 - val_acc: 0.6260 Epoch 63/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7836 - acc: 0.7674 - val_loss: 1.3378 - val_acc: 0.6439 Epoch 64/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7809 - acc: 0.7713 - val_loss: 1.3294 - val_acc: 0.6486 Epoch 65/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7524 - acc: 0.7791 - val_loss: 1.3583 - val_acc: 0.6411 Epoch 66/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7560 - acc: 0.7772 - val_loss: 1.2827 - val_acc: 0.6568 Epoch 67/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7304 - acc: 0.7841 - val_loss: 1.3639 - val_acc: 0.6396 Epoch 68/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7171 - acc: 0.7876 - val_loss: 1.3529 - val_acc: 0.6435 Epoch 69/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7147 - acc: 0.7867 - val_loss: 1.3744 - val_acc: 0.6406 Epoch 70/200 300/300 [==============================] - 19s 64ms/step - loss: 0.7005 - acc: 0.7900 - val_loss: 1.2839 - val_acc: 0.6528 Epoch 71/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6640 - acc: 0.8022 - val_loss: 1.3810 - val_acc: 0.6384 Epoch 72/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6824 - acc: 0.7961 - val_loss: 1.3979 - val_acc: 0.6348 Epoch 73/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6701 - acc: 0.8002 - val_loss: 1.3736 - val_acc: 0.6450 Epoch 74/200 300/300 [==============================] - 19s 
64ms/step - loss: 0.6702 - acc: 0.8039 - val_loss: 1.4071 - val_acc: 0.6382 Epoch 75/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6417 - acc: 0.8095 - val_loss: 1.3420 - val_acc: 0.6544 Epoch 76/200 300/300 [==============================] - 19s 65ms/step - loss: 0.6295 - acc: 0.8131 - val_loss: 1.3477 - val_acc: 0.6574 Epoch 77/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6146 - acc: 0.8182 - val_loss: 1.3863 - val_acc: 0.6441 Epoch 78/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6213 - acc: 0.8153 - val_loss: 1.3992 - val_acc: 0.6396 Epoch 79/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6107 - acc: 0.8187 - val_loss: 1.3447 - val_acc: 0.6625 Epoch 80/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5981 - acc: 0.8173 - val_loss: 1.4105 - val_acc: 0.6458 Epoch 81/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5867 - acc: 0.8252 - val_loss: 1.4135 - val_acc: 0.6382 Epoch 82/200 300/300 [==============================] - 19s 64ms/step - loss: 0.6042 - acc: 0.8175 - val_loss: 1.3240 - val_acc: 0.6587 Epoch 83/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5727 - acc: 0.8284 - val_loss: 1.3527 - val_acc: 0.6481 Epoch 84/200 300/300 [==============================] - 19s 65ms/step - loss: 0.5587 - acc: 0.8323 - val_loss: 1.2985 - val_acc: 0.6684 Epoch 85/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5628 - acc: 0.8306 - val_loss: 1.2966 - val_acc: 0.6670 Epoch 86/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5489 - acc: 0.8323 - val_loss: 1.3886 - val_acc: 0.6461 Epoch 87/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5396 - acc: 0.8375 - val_loss: 1.3048 - val_acc: 0.6630 Epoch 88/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5390 - acc: 0.8383 - val_loss: 1.3447 - val_acc: 0.6633 Epoch 89/200 
300/300 [==============================] - 19s 64ms/step - loss: 0.5213 - acc: 0.8408 - val_loss: 1.3342 - val_acc: 0.6638 Epoch 90/200 300/300 [==============================] - 19s 65ms/step - loss: 0.5057 - acc: 0.8475 - val_loss: 1.2658 - val_acc: 0.6760 Epoch 91/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5162 - acc: 0.8480 - val_loss: 1.3229 - val_acc: 0.6647 Epoch 92/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5042 - acc: 0.8478 - val_loss: 1.3672 - val_acc: 0.6605 Epoch 93/200 300/300 [==============================] - 19s 64ms/step - loss: 0.5022 - acc: 0.8490 - val_loss: 1.3298 - val_acc: 0.6655 Epoch 94/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4921 - acc: 0.8505 - val_loss: 1.2960 - val_acc: 0.6711 Epoch 95/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4627 - acc: 0.8616 - val_loss: 1.3228 - val_acc: 0.6607 Epoch 96/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4734 - acc: 0.8558 - val_loss: 1.2850 - val_acc: 0.6752 Epoch 97/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4699 - acc: 0.8581 - val_loss: 1.4863 - val_acc: 0.6382 Epoch 98/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4545 - acc: 0.8614 - val_loss: 1.3685 - val_acc: 0.6640 Epoch 99/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4508 - acc: 0.8604 - val_loss: 1.3882 - val_acc: 0.6632 Epoch 100/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4511 - acc: 0.8619 - val_loss: 1.4118 - val_acc: 0.6546 Epoch 101/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4411 - acc: 0.8649 - val_loss: 1.3719 - val_acc: 0.6677 Epoch 102/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4267 - acc: 0.8708 - val_loss: 1.4089 - val_acc: 0.6592 Epoch 103/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4407 - acc: 0.8667 - 
val_loss: 1.4326 - val_acc: 0.6531 Epoch 104/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4275 - acc: 0.8704 - val_loss: 1.5173 - val_acc: 0.6286 Epoch 105/200 300/300 [==============================] - 19s 64ms/step - loss: 0.4291 - acc: 0.8716 - val_loss: 1.3546 - val_acc: 0.6676 Training time: 2030.0952756404877
# Plot loss/accuracy curves for this run, then score on the held-out test set.
plot_results(history.history)

val_loss, val_acc = model_improved2.evaluate(X_test, y_test, verbose=0)
print('Val loss:', val_loss)
print('Val accuracy:', val_acc)
Val loss: 1.265762448310852 Val accuracy: 0.6759999990463257
Tune the best model — improved model 2, trained on both the augmented data and the default data — aiming for higher validation accuracy and lower validation loss (without overfitting).
- Restrict the input-layer dropout to between 0.1 and 0.2, because dropping too much of the input data can adversely affect training.
- Restrict the intermediate dropout to 0.5 and below: 0.5 is considered ideal for large networks, and rates above 0.5 are not advised since they may cull more connections without improving regularization.
def _add_conv_block(model, filters, dropout_rate, input_shape=None):
    """Append two Conv-BN-ReLU stages, a 2x2 max-pool and dropout to *model*.

    input_shape is passed to the first Conv2D only, for the network's
    first block.
    """
    first_kwargs = {'input_shape': input_shape} if input_shape is not None else {}
    model.add(Conv2D(filters, (3, 3), padding='same', **first_kwargs))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters, (3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(dropout_rate))


def tune_model(hp):
    """Build and compile the CNN whose dropout rates and learning rate are
    searched by Keras Tuner.

    Parameters
    ----------
    hp : keras_tuner.HyperParameters
        Hyperparameter container supplied by the tuner.

    Returns
    -------
    tf.keras.Sequential
        Compiled model ready for ``tuner.search``.
    """
    # BUG FIX: both Float hyperparameters were previously registered under the
    # same name 'dropout', so Keras Tuner returned one shared value and the
    # intermediate range (up to 0.5) was never explored -- the search-space
    # summary showed a single dropout entry with max_value=0.2. Distinct names
    # let the two rates be tuned independently.
    first_dropout = hp.Float('first_dropout', min_value=0.1, max_value=0.2, sampling="log")
    intermediate_dropout = hp.Float('intermediate_dropout', min_value=0.1, max_value=0.5, sampling="log")
    # Choose an optimal learning rate from 0.1, 0.01, 0.001, or 0.0001.
    learning_rate = hp.Choice('learning_rate', values=[1e-1, 1e-2, 1e-3, 1e-4])

    model = tf.keras.Sequential()
    # Four conv blocks; only the first uses the (lighter) input dropout.
    # MaxPooling2D matches the name imported at the top of the file
    # (MaxPool2D is only a Keras alias and is not in the visible imports).
    _add_conv_block(model, 256, first_dropout, input_shape=(32, 32, 3))
    _add_conv_block(model, 512, intermediate_dropout)
    _add_conv_block(model, 512, intermediate_dropout)
    _add_conv_block(model, 512, intermediate_dropout)

    # Classifier head: one hidden dense layer, then 100-way softmax (CIFAR-100).
    model.add(Flatten())
    model.add(Dense(1024))
    model.add(Activation('relu'))
    model.add(Dropout(intermediate_dropout))
    # NOTE(review): RandomNormal / Constant / optimizers are presumably
    # imported in a cell outside this view -- confirm before running standalone.
    model.add(BatchNormalization(momentum=0.95,
                                 epsilon=0.005,
                                 beta_initializer=RandomNormal(mean=0.0, stddev=0.05),
                                 gamma_initializer=Constant(value=0.9)))
    model.add(Dense(100, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizers.RMSprop(learning_rate),
                  metrics=['acc'])
    model.summary()
    return model
Using RandomSearch in keras tuner
import keras_tuner as kt

# Random search over the tune_model search space: 5 trials, selected on
# validation accuracy; overwrite any previous results in the project dir.
tuner = kt.RandomSearch(
    hypermodel=tune_model,
    objective='val_acc',
    max_trials=5,
    overwrite=True,
)
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_8 (Conv2D) (None, 32, 32, 256) 7168
batch_normalization_9 (Batc (None, 32, 32, 256) 1024
hNormalization)
activation_9 (Activation) (None, 32, 32, 256) 0
conv2d_9 (Conv2D) (None, 32, 32, 256) 590080
batch_normalization_10 (Bat (None, 32, 32, 256) 1024
chNormalization)
activation_10 (Activation) (None, 32, 32, 256) 0
max_pooling2d_4 (MaxPooling (None, 16, 16, 256) 0
2D)
dropout_5 (Dropout) (None, 16, 16, 256) 0
conv2d_10 (Conv2D) (None, 16, 16, 512) 1180160
batch_normalization_11 (Bat (None, 16, 16, 512) 2048
chNormalization)
activation_11 (Activation) (None, 16, 16, 512) 0
conv2d_11 (Conv2D) (None, 16, 16, 512) 2359808
batch_normalization_12 (Bat (None, 16, 16, 512) 2048
chNormalization)
activation_12 (Activation) (None, 16, 16, 512) 0
max_pooling2d_5 (MaxPooling (None, 8, 8, 512) 0
2D)
dropout_6 (Dropout) (None, 8, 8, 512) 0
conv2d_12 (Conv2D) (None, 8, 8, 512) 2359808
batch_normalization_13 (Bat (None, 8, 8, 512) 2048
chNormalization)
activation_13 (Activation) (None, 8, 8, 512) 0
conv2d_13 (Conv2D) (None, 8, 8, 512) 2359808
batch_normalization_14 (Bat (None, 8, 8, 512) 2048
chNormalization)
activation_14 (Activation) (None, 8, 8, 512) 0
max_pooling2d_6 (MaxPooling (None, 4, 4, 512) 0
2D)
dropout_7 (Dropout) (None, 4, 4, 512) 0
conv2d_14 (Conv2D) (None, 4, 4, 512) 2359808
batch_normalization_15 (Bat (None, 4, 4, 512) 2048
chNormalization)
activation_15 (Activation) (None, 4, 4, 512) 0
conv2d_15 (Conv2D) (None, 4, 4, 512) 2359808
batch_normalization_16 (Bat (None, 4, 4, 512) 2048
chNormalization)
activation_16 (Activation) (None, 4, 4, 512) 0
max_pooling2d_7 (MaxPooling (None, 2, 2, 512) 0
2D)
dropout_8 (Dropout) (None, 2, 2, 512) 0
flatten_1 (Flatten) (None, 2048) 0
dense_2 (Dense) (None, 1024) 2098176
activation_17 (Activation) (None, 1024) 0
dropout_9 (Dropout) (None, 1024) 0
batch_normalization_17 (Bat (None, 1024) 4096
chNormalization)
dense_3 (Dense) (None, 100) 102500
=================================================================
Total params: 15,795,556
Trainable params: 15,786,340
Non-trainable params: 9,216
_________________________________________________________________
from tensorflow.keras.models import Sequential

# Stop a trial once val_acc has not improved by at least 1e-7 for 15
# consecutive epochs, restoring the best weights seen so far.
search_stopper = EarlyStopping(
    monitor='val_acc',
    patience=15,
    min_delta=0.0000001,
    restore_best_weights=True,
)
# Search on the augmented training stream; validate on the untouched test set.
tuner.search(
    train_datagen.flow(X_train_aug, y_train_aug, batch_size=64),
    epochs=30,
    validation_data=(X_test, y_test),
    callbacks=[search_stopper],
)
tuner.results_summary(num_trials=3)
Trial 5 Complete [00h 42m 50s] val_acc: 0.6762999892234802 Best val_acc So Far: 0.6773999929428101 Total elapsed time: 03h 46m 16s INFO:tensorflow:Oracle triggered exit Results summary Results in .\untitled_project Showing 3 best trials <keras_tuner.engine.objective.Objective object at 0x000002EA5CADF4C0> Trial summary Hyperparameters: dropout: 0.11680940233031101 learning_rate: 0.0001 Score: 0.6773999929428101 Trial summary Hyperparameters: dropout: 0.1302641091963073 learning_rate: 0.0001 Score: 0.6762999892234802 Trial summary Hyperparameters: dropout: 0.1226629950461613 learning_rate: 0.001 Score: 0.6502000093460083
# Retrieve the single best hyperparameter set found by the search;
# best_hps is reused below to rebuild and retrain the winning model.
best_hps=tuner.get_best_hyperparameters(num_trials=1)[0]
# Print the registered search space for the record.
tuner.search_space_summary()
Search space summary
Default search space size: 2
dropout (Float)
{'default': 0.1, 'conditions': [], 'min_value': 0.1, 'max_value': 0.2, 'step': None, 'sampling': 'log'}
learning_rate (Choice)
{'default': 0.1, 'conditions': [], 'values': [0.1, 0.01, 0.001, 0.0001], 'ordered': True}
# Rebuild the winning architecture from the best hyperparameters and retrain
# it from scratch on the augmented data, timing the whole run.
import time

t0 = time.time()
model = tuner.hypermodel.build(best_hps)
history = model.fit(
    train_datagen.flow(X_train_aug, y_train_aug, batch_size=64),
    steps_per_epoch=300,
    epochs=200,
    validation_data=(X_test, y_test),
    verbose=1,
    callbacks=[early_stopping],
)
print(f"Training time: {time.time() - t0}")
Model: "sequential_2"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_16 (Conv2D) (None, 32, 32, 256) 7168
batch_normalization_18 (Bat (None, 32, 32, 256) 1024
chNormalization)
activation_18 (Activation) (None, 32, 32, 256) 0
conv2d_17 (Conv2D) (None, 32, 32, 256) 590080
batch_normalization_19 (Bat (None, 32, 32, 256) 1024
chNormalization)
activation_19 (Activation) (None, 32, 32, 256) 0
max_pooling2d_8 (MaxPooling (None, 16, 16, 256) 0
2D)
dropout_10 (Dropout) (None, 16, 16, 256) 0
conv2d_18 (Conv2D) (None, 16, 16, 512) 1180160
batch_normalization_20 (Bat (None, 16, 16, 512) 2048
chNormalization)
activation_20 (Activation) (None, 16, 16, 512) 0
conv2d_19 (Conv2D) (None, 16, 16, 512) 2359808
batch_normalization_21 (Bat (None, 16, 16, 512) 2048
chNormalization)
activation_21 (Activation) (None, 16, 16, 512) 0
max_pooling2d_9 (MaxPooling (None, 8, 8, 512) 0
2D)
dropout_11 (Dropout) (None, 8, 8, 512) 0
conv2d_20 (Conv2D) (None, 8, 8, 512) 2359808
batch_normalization_22 (Bat (None, 8, 8, 512) 2048
chNormalization)
activation_22 (Activation) (None, 8, 8, 512) 0
conv2d_21 (Conv2D) (None, 8, 8, 512) 2359808
batch_normalization_23 (Bat (None, 8, 8, 512) 2048
chNormalization)
activation_23 (Activation) (None, 8, 8, 512) 0
max_pooling2d_10 (MaxPoolin (None, 4, 4, 512) 0
g2D)
dropout_12 (Dropout) (None, 4, 4, 512) 0
conv2d_22 (Conv2D) (None, 4, 4, 512) 2359808
batch_normalization_24 (Bat (None, 4, 4, 512) 2048
chNormalization)
activation_24 (Activation) (None, 4, 4, 512) 0
conv2d_23 (Conv2D) (None, 4, 4, 512) 2359808
batch_normalization_25 (Bat (None, 4, 4, 512) 2048
chNormalization)
activation_25 (Activation) (None, 4, 4, 512) 0
max_pooling2d_11 (MaxPoolin (None, 2, 2, 512) 0
g2D)
dropout_13 (Dropout) (None, 2, 2, 512) 0
flatten_2 (Flatten) (None, 2048) 0
dense_4 (Dense) (None, 1024) 2098176
activation_26 (Activation) (None, 1024) 0
dropout_14 (Dropout) (None, 1024) 0
batch_normalization_26 (Bat (None, 1024) 4096
chNormalization)
dense_5 (Dense) (None, 100) 102500
=================================================================
Total params: 15,795,556
Trainable params: 15,786,340
Non-trainable params: 9,216
_________________________________________________________________
Epoch 1/200
300/300 [==============================] - 22s 66ms/step - loss: 4.1326 - acc: 0.0896 - val_loss: 5.1448 - val_acc: 0.0147
Epoch 2/200
300/300 [==============================] - 19s 64ms/step - loss: 3.5833 - acc: 0.1588 - val_loss: 3.6740 - val_acc: 0.1461
Epoch 3/200
300/300 [==============================] - 19s 64ms/step - loss: 3.2336 - acc: 0.2193 - val_loss: 3.0496 - val_acc: 0.2587
Epoch 4/200
300/300 [==============================] - 19s 64ms/step - loss: 2.9714 - acc: 0.2643 - val_loss: 3.2687 - val_acc: 0.2257
Epoch 5/200
300/300 [==============================] - 19s 64ms/step - loss: 2.7811 - acc: 0.3031 - val_loss: 2.8468 - val_acc: 0.2976
Epoch 6/200
300/300 [==============================] - 19s 64ms/step - loss: 2.5924 - acc: 0.3392 - val_loss: 2.5952 - val_acc: 0.3469
Epoch 7/200
300/300 [==============================] - 19s 64ms/step - loss: 2.4447 - acc: 0.3704 - val_loss: 2.5574 - val_acc: 0.3592
Epoch 8/200
300/300 [==============================] - 19s 64ms/step - loss: 2.3192 - acc: 0.3972 - val_loss: 2.4260 - val_acc: 0.3759
Epoch 9/200
300/300 [==============================] - 19s 64ms/step - loss: 2.1932 - acc: 0.4259 - val_loss: 2.1266 - val_acc: 0.4396
Epoch 10/200
300/300 [==============================] - 19s 64ms/step - loss: 2.1118 - acc: 0.4402 - val_loss: 2.2023 - val_acc: 0.4280
Epoch 11/200
300/300 [==============================] - 19s 64ms/step - loss: 2.0223 - acc: 0.4633 - val_loss: 2.4311 - val_acc: 0.3874
Epoch 12/200
300/300 [==============================] - 19s 64ms/step - loss: 1.9408 - acc: 0.4802 - val_loss: 2.1434 - val_acc: 0.4351
Epoch 13/200
300/300 [==============================] - 20s 67ms/step - loss: 1.8678 - acc: 0.5020 - val_loss: 1.9286 - val_acc: 0.4878
Epoch 14/200
300/300 [==============================] - 19s 64ms/step - loss: 1.8216 - acc: 0.5118 - val_loss: 2.0723 - val_acc: 0.4611
Epoch 15/200
300/300 [==============================] - 19s 64ms/step - loss: 1.7361 - acc: 0.5302 - val_loss: 2.1036 - val_acc: 0.4514
Epoch 16/200
300/300 [==============================] - 19s 64ms/step - loss: 1.6914 - acc: 0.5441 - val_loss: 2.0268 - val_acc: 0.4655
Epoch 17/200
300/300 [==============================] - 19s 64ms/step - loss: 1.6223 - acc: 0.5596 - val_loss: 1.9267 - val_acc: 0.4984
Epoch 18/200
300/300 [==============================] - 19s 64ms/step - loss: 1.5651 - acc: 0.5725 - val_loss: 1.9986 - val_acc: 0.4745
Epoch 19/200
300/300 [==============================] - 19s 64ms/step - loss: 1.5343 - acc: 0.5761 - val_loss: 1.7857 - val_acc: 0.5188
Epoch 20/200
300/300 [==============================] - 19s 64ms/step - loss: 1.5193 - acc: 0.5768 - val_loss: 1.6923 - val_acc: 0.5451
Epoch 21/200
300/300 [==============================] - 19s 64ms/step - loss: 1.4420 - acc: 0.5973 - val_loss: 1.7427 - val_acc: 0.5304
Epoch 22/200
300/300 [==============================] - 19s 64ms/step - loss: 1.4190 - acc: 0.6079 - val_loss: 1.5633 - val_acc: 0.5729
Epoch 23/200
300/300 [==============================] - 19s 64ms/step - loss: 1.3633 - acc: 0.6215 - val_loss: 1.6140 - val_acc: 0.5627
Epoch 24/200
300/300 [==============================] - 19s 64ms/step - loss: 1.3164 - acc: 0.6327 - val_loss: 1.6230 - val_acc: 0.5575
Epoch 25/200
300/300 [==============================] - 19s 64ms/step - loss: 1.3001 - acc: 0.6356 - val_loss: 1.6884 - val_acc: 0.5457
Epoch 26/200
300/300 [==============================] - 19s 64ms/step - loss: 1.2457 - acc: 0.6520 - val_loss: 1.7853 - val_acc: 0.5291
Epoch 27/200
300/300 [==============================] - 19s 64ms/step - loss: 1.2175 - acc: 0.6591 - val_loss: 1.6366 - val_acc: 0.5644
Epoch 28/200
300/300 [==============================] - 19s 64ms/step - loss: 1.1798 - acc: 0.6668 - val_loss: 1.6315 - val_acc: 0.5649
Epoch 29/200
300/300 [==============================] - 19s 64ms/step - loss: 1.1755 - acc: 0.6677 - val_loss: 1.5880 - val_acc: 0.5771
Epoch 30/200
300/300 [==============================] - 19s 64ms/step - loss: 1.1412 - acc: 0.6748 - val_loss: 1.5724 - val_acc: 0.5747
Epoch 31/200
300/300 [==============================] - 19s 64ms/step - loss: 1.1081 - acc: 0.6828 - val_loss: 1.7503 - val_acc: 0.5433
Epoch 32/200
300/300 [==============================] - 19s 65ms/step - loss: 1.0940 - acc: 0.6931 - val_loss: 1.5523 - val_acc: 0.5838
Epoch 33/200
300/300 [==============================] - 19s 64ms/step - loss: 1.0601 - acc: 0.6978 - val_loss: 1.6688 - val_acc: 0.5599
Epoch 34/200
300/300 [==============================] - 19s 64ms/step - loss: 1.0284 - acc: 0.7065 - val_loss: 1.4956 - val_acc: 0.5981
Epoch 35/200
300/300 [==============================] - 19s 64ms/step - loss: 1.0201 - acc: 0.7086 - val_loss: 1.5548 - val_acc: 0.5873
Epoch 36/200
300/300 [==============================] - 19s 64ms/step - loss: 0.9803 - acc: 0.7215 - val_loss: 1.4471 - val_acc: 0.6091
Epoch 37/200
300/300 [==============================] - 19s 64ms/step - loss: 0.9593 - acc: 0.7274 - val_loss: 1.5105 - val_acc: 0.5966
Epoch 38/200
300/300 [==============================] - 19s 64ms/step - loss: 0.9437 - acc: 0.7299 - val_loss: 1.5075 - val_acc: 0.5992
Epoch 39/200
300/300 [==============================] - 19s 64ms/step - loss: 0.9245 - acc: 0.7349 - val_loss: 1.4079 - val_acc: 0.6251
Epoch 40/200
300/300 [==============================] - 19s 64ms/step - loss: 0.8967 - acc: 0.7460 - val_loss: 1.5805 - val_acc: 0.5828
Epoch 41/200
300/300 [==============================] - 19s 64ms/step - loss: 0.8773 - acc: 0.7472 - val_loss: 1.4830 - val_acc: 0.5998
Epoch 42/200
300/300 [==============================] - 19s 64ms/step - loss: 0.8493 - acc: 0.7544 - val_loss: 1.6109 - val_acc: 0.5836
Epoch 43/200
300/300 [==============================] - 19s 64ms/step - loss: 0.8291 - acc: 0.7607 - val_loss: 1.4966 - val_acc: 0.5999
Epoch 44/200
300/300 [==============================] - 19s 64ms/step - loss: 0.8291 - acc: 0.7613 - val_loss: 1.5137 - val_acc: 0.6040
Epoch 45/200
300/300 [==============================] - 19s 64ms/step - loss: 0.8087 - acc: 0.7652 - val_loss: 1.6221 - val_acc: 0.5819
Epoch 46/200
300/300 [==============================] - 19s 64ms/step - loss: 0.7900 - acc: 0.7708 - val_loss: 1.3488 - val_acc: 0.6338
Epoch 47/200
300/300 [==============================] - 19s 64ms/step - loss: 0.7527 - acc: 0.7796 - val_loss: 1.3890 - val_acc: 0.6301
Epoch 48/200
300/300 [==============================] - 19s 64ms/step - loss: 0.7573 - acc: 0.7764 - val_loss: 1.4800 - val_acc: 0.6106
Epoch 49/200
300/300 [==============================] - 19s 64ms/step - loss: 0.7401 - acc: 0.7816 - val_loss: 1.4278 - val_acc: 0.6200
Epoch 50/200
300/300 [==============================] - 19s 64ms/step - loss: 0.7075 - acc: 0.7942 - val_loss: 1.4544 - val_acc: 0.6164
Epoch 51/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6973 - acc: 0.7973 - val_loss: 1.5805 - val_acc: 0.5947
Epoch 52/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6762 - acc: 0.8045 - val_loss: 1.4444 - val_acc: 0.6260
Epoch 53/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6731 - acc: 0.8015 - val_loss: 1.5137 - val_acc: 0.6121
Epoch 54/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6513 - acc: 0.8082 - val_loss: 1.4514 - val_acc: 0.6218
Epoch 55/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6430 - acc: 0.8129 - val_loss: 1.4529 - val_acc: 0.6202
Epoch 56/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6234 - acc: 0.8167 - val_loss: 1.3755 - val_acc: 0.6460
Epoch 57/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6075 - acc: 0.8226 - val_loss: 1.4791 - val_acc: 0.6297
Epoch 58/200
300/300 [==============================] - 19s 64ms/step - loss: 0.6106 - acc: 0.8195 - val_loss: 1.4022 - val_acc: 0.6385
Epoch 59/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5832 - acc: 0.8311 - val_loss: 1.4741 - val_acc: 0.6220
Epoch 60/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5680 - acc: 0.8317 - val_loss: 1.5114 - val_acc: 0.6162
Epoch 61/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5558 - acc: 0.8358 - val_loss: 1.4072 - val_acc: 0.6357
Epoch 62/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5439 - acc: 0.8397 - val_loss: 1.4371 - val_acc: 0.6281
Epoch 63/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5305 - acc: 0.8432 - val_loss: 1.4132 - val_acc: 0.6347
Epoch 64/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5172 - acc: 0.8470 - val_loss: 1.5731 - val_acc: 0.6058
Epoch 65/200
300/300 [==============================] - 19s 64ms/step - loss: 0.5089 - acc: 0.8495 - val_loss: 1.4660 - val_acc: 0.6259
Epoch 66/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4984 - acc: 0.8535 - val_loss: 1.4221 - val_acc: 0.6418
Epoch 67/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4922 - acc: 0.8542 - val_loss: 1.5947 - val_acc: 0.6094
Epoch 68/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4890 - acc: 0.8555 - val_loss: 1.4839 - val_acc: 0.6354
Epoch 69/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4803 - acc: 0.8562 - val_loss: 1.4703 - val_acc: 0.6362
Epoch 70/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4563 - acc: 0.8634 - val_loss: 1.4026 - val_acc: 0.6530
Epoch 71/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4469 - acc: 0.8649 - val_loss: 1.4065 - val_acc: 0.6386
Epoch 72/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4603 - acc: 0.8645 - val_loss: 1.3917 - val_acc: 0.6533
Epoch 73/200
300/300 [==============================] - 19s 63ms/step - loss: 0.4402 - acc: 0.8689 - val_loss: 1.5601 - val_acc: 0.6205
Epoch 74/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4299 - acc: 0.8706 - val_loss: 1.4498 - val_acc: 0.6394
Epoch 75/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4311 - acc: 0.8704 - val_loss: 1.4303 - val_acc: 0.6505
Epoch 76/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4170 - acc: 0.8757 - val_loss: 1.5333 - val_acc: 0.6283
Epoch 77/200
300/300 [==============================] - 19s 64ms/step - loss: 0.4104 - acc: 0.8782 - val_loss: 1.4618 - val_acc: 0.6433
Epoch 78/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3973 - acc: 0.8807 - val_loss: 1.5840 - val_acc: 0.6233
Epoch 79/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3820 - acc: 0.8861 - val_loss: 1.3876 - val_acc: 0.6462
Epoch 80/200
300/300 [==============================] - 19s 63ms/step - loss: 0.3771 - acc: 0.8848 - val_loss: 1.4109 - val_acc: 0.6561
Epoch 81/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3733 - acc: 0.8865 - val_loss: 1.4412 - val_acc: 0.6544
Epoch 82/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3618 - acc: 0.8921 - val_loss: 1.4150 - val_acc: 0.6562
Epoch 83/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3667 - acc: 0.8894 - val_loss: 1.4560 - val_acc: 0.6490
Epoch 84/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3540 - acc: 0.8938 - val_loss: 1.4603 - val_acc: 0.6448
Epoch 85/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3588 - acc: 0.8927 - val_loss: 1.4860 - val_acc: 0.6480
Epoch 86/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3437 - acc: 0.8960 - val_loss: 1.5316 - val_acc: 0.6348
Epoch 87/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3417 - acc: 0.8980 - val_loss: 1.4304 - val_acc: 0.6535
Epoch 88/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3356 - acc: 0.8965 - val_loss: 1.4440 - val_acc: 0.6545
Epoch 89/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3173 - acc: 0.9068 - val_loss: 1.4565 - val_acc: 0.6540
Epoch 90/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3147 - acc: 0.9068 - val_loss: 1.4304 - val_acc: 0.6550
Epoch 91/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3154 - acc: 0.9069 - val_loss: 1.4757 - val_acc: 0.6529
Epoch 92/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3046 - acc: 0.9076 - val_loss: 1.4965 - val_acc: 0.6471
Epoch 93/200
300/300 [==============================] - 19s 64ms/step - loss: 0.3050 - acc: 0.9072 - val_loss: 1.4920 - val_acc: 0.6539
Epoch 94/200
300/300 [==============================] - 19s 64ms/step - loss: 0.2973 - acc: 0.9107 - val_loss: 1.5544 - val_acc: 0.6442
Epoch 95/200
300/300 [==============================] - 19s 64ms/step - loss: 0.2941 - acc: 0.9089 - val_loss: 1.4915 - val_acc: 0.6536
Epoch 96/200
300/300 [==============================] - 19s 64ms/step - loss: 0.2864 - acc: 0.9137 - val_loss: 1.4920 - val_acc: 0.6416
Epoch 97/200
300/300 [==============================] - 19s 64ms/step - loss: 0.2905 - acc: 0.9147 - val_loss: 1.5498 - val_acc: 0.6407
Training time: 1869.0101566314697
# Plot training/validation loss and accuracy curves for the tuned model.
plot_results(history.history)
The tuned model's scores show lower validation accuracy (0.6562 vs 0.6760) and higher validation loss (1.415 vs 1.266) than the best model before tuning.
# Silent evaluation of the tuned model; evaluate() returns [loss, acc].
tuned_loss, tuned_acc = model.evaluate(X_test, y_test, verbose=0)
print('Val loss:', tuned_loss)
print('Val accuracy:', tuned_acc)
Val loss: 1.4150466918945312 Val accuracy: 0.6561999917030334
Now, let’s use our best model before and after the hypertuning to make predictions on the test set and check our results.
# To plot the visualisation of the predictions
def plot_image(i, predictions_array, true_label, img):
    """Show test image *i* with its predicted label, confidence and true label.

    The xlabel is blue when the prediction matches the ground truth and red
    otherwise.

    Parameters
    ----------
    i : int
        Index of the sample to display.
    predictions_array : array-like, shape (n_samples, n_classes)
        Per-sample class probabilities.
    true_label : array-like, shape (n_samples, n_classes)
        One-hot encoded ground-truth labels.
    img : array-like
        Batch of images indexed by *i*.
    """
    probs = predictions_array[i]
    one_hot = true_label[i]
    plt.grid(False)
    plt.xticks([])
    plt.yticks([])
    plt.imshow(img[i], cmap=plt.cm.binary)
    predicted_label = int(np.argmax(probs))
    # np.argmax replaces list(...).index(1): computed once instead of twice,
    # and robust when the one-hot entries are floats (.index(1) needs an
    # exact match with the integer 1 and raises ValueError otherwise).
    true_idx = int(np.argmax(one_hot))
    color = 'blue' if predicted_label == true_idx else 'red'
    plt.xlabel("{} {:2.0f}% ({})".format(class_names[predicted_label],
                                         100*np.max(probs),
                                         class_names[true_idx]),
               color=color)
def plot_value_array(i, predictions_array, true_label):
    """Bar-plot the 100 class probabilities for sample *i*.

    The predicted class bar is red and the true class bar is blue; blue wins
    when the prediction is correct because it is set last.
    """
    probs = predictions_array[i]
    one_hot = true_label[i]
    plt.grid(False)
    plt.xticks([])
    plt.yticks([])
    bars = plt.bar(range(100), probs, color="#777777")
    plt.ylim([0, 1])
    predicted_label = int(np.argmax(probs))
    bars[predicted_label].set_color('red')
    # np.argmax replaces list(...).index(1): also works for float one-hot
    # labels, where .index(1) could raise ValueError.
    bars[int(np.argmax(one_hot))].set_color('blue')
from sklearn.metrics import accuracy_score
# Class-probability predictions of improved model 2 on the full test set.
predict= model_improved2.predict(X_test)
313/313 [==============================] - 3s 8ms/step
# Hard labels: index of the highest-probability class for each sample.
pred_labels=np.argmax(predict, axis=1)
# Re-evaluate to report [test loss, test accuracy] alongside the report below.
eval_result = model_improved2.evaluate(X_test, y_test)
print("[test loss, test accuracy]:", eval_result)
313/313 [==============================] - 3s 10ms/step - loss: 1.3366 - acc: 0.6476 [test loss, test accuracy]: [1.3365967273712158, 0.647599995136261]
import sklearn.metrics as metrics
# Per-class precision/recall/F1 against the integer ground-truth labels.
print(metrics.classification_report(data['Class'], pred_labels, target_names=class_names))
precision recall f1-score support
apple 0.93 0.79 0.85 100
aquarium_fish 0.73 0.80 0.76 100
baby 0.58 0.56 0.57 100
bear 0.55 0.48 0.51 100
beaver 0.55 0.38 0.45 100
bed 0.69 0.58 0.63 100
bee 0.70 0.78 0.74 100
beetle 0.60 0.73 0.66 100
bicycle 0.82 0.84 0.83 100
bottle 0.79 0.71 0.75 100
bowl 0.64 0.42 0.51 100
boy 0.52 0.37 0.43 100
bridge 0.64 0.74 0.69 100
bus 0.49 0.75 0.59 100
butterfly 0.48 0.82 0.61 100
camel 0.65 0.71 0.68 100
can 0.77 0.58 0.66 100
castle 0.86 0.68 0.76 100
caterpillar 0.71 0.59 0.64 100
cattle 0.76 0.59 0.66 100
chair 0.80 0.86 0.83 100
chimpanzee 0.90 0.75 0.82 100
clock 0.69 0.61 0.65 100
cloud 0.92 0.48 0.63 100
cockroach 0.96 0.68 0.80 100
couch 0.65 0.55 0.59 100
crab 0.53 0.68 0.59 100
crocodile 0.43 0.49 0.46 100
cup 0.86 0.64 0.74 100
dinosaur 0.46 0.76 0.57 100
dolphin 0.65 0.48 0.55 100
elephant 0.81 0.56 0.66 100
flatfish 0.68 0.51 0.58 100
forest 0.61 0.60 0.61 100
fox 0.66 0.68 0.67 100
girl 0.49 0.48 0.49 100
hamster 0.88 0.63 0.73 100
house 0.75 0.58 0.66 100
kangaroo 0.79 0.37 0.50 100
computer_keyboard 0.82 0.84 0.83 100
lamp 0.74 0.42 0.54 100
lawn_mower 0.91 0.74 0.82 100
leopard 0.62 0.75 0.68 100
lion 0.91 0.64 0.75 100
lizard 0.29 0.49 0.36 100
lobster 0.39 0.71 0.50 100
man 0.55 0.55 0.55 100
maple_tree 0.70 0.63 0.66 100
motorcycle 0.78 0.91 0.84 100
mountain 0.65 0.85 0.74 100
mouse 0.44 0.47 0.45 100
mushroom 0.65 0.81 0.72 100
oak_tree 0.61 0.62 0.61 100
orange 0.79 0.90 0.84 100
orchid 0.76 0.69 0.72 100
otter 0.45 0.22 0.30 100
palm_tree 0.81 0.91 0.85 100
pear 0.70 0.71 0.70 100
pickup_truck 0.63 0.85 0.73 100
pine_tree 0.56 0.71 0.63 100
plain 0.95 0.57 0.71 100
plate 0.81 0.54 0.65 100
poppy 0.94 0.49 0.64 100
porcupine 0.77 0.54 0.64 100
possum 0.71 0.46 0.56 100
rabbit 0.64 0.47 0.54 100
raccoon 0.55 0.74 0.63 100
ray 0.57 0.56 0.56 100
road 0.86 0.90 0.88 100
rocket 0.84 0.66 0.74 100
rose 0.75 0.65 0.70 100
sea 0.76 0.70 0.73 100
seal 0.28 0.50 0.36 100
shark 0.57 0.39 0.46 100
shrew 0.38 0.47 0.42 100
skunk 0.86 0.83 0.85 100
skyscraper 0.81 0.85 0.83 100
snail 0.56 0.65 0.60 100
snake 0.36 0.76 0.48 100
spider 0.70 0.73 0.71 100
squirrel 0.49 0.50 0.49 100
streetcar 0.64 0.65 0.64 100
sunflower 0.93 0.88 0.90 100
sweet_pepper 0.67 0.64 0.65 100
table 0.77 0.55 0.64 100
tank 0.79 0.72 0.75 100
telephone 0.64 0.72 0.68 100
television 0.67 0.82 0.74 100
tiger 0.83 0.72 0.77 100
tractor 0.58 0.82 0.68 100
train 0.75 0.63 0.68 100
trout 0.82 0.70 0.76 100
tulip 0.51 0.76 0.61 100
turtle 0.36 0.65 0.47 100
wardrobe 0.91 0.83 0.87 100
whale 0.62 0.68 0.65 100
willow_tree 0.71 0.53 0.61 100
wolf 0.75 0.69 0.72 100
woman 0.55 0.37 0.44 100
worm 0.56 0.73 0.63 100
accuracy 0.65 10000
macro avg 0.68 0.65 0.65 10000
weighted avg 0.68 0.65 0.65 10000
Correct prediction labels are blue and incorrect prediction labels are red. The number gives the percent (out of 100) for the predicted label.
# Show the first 15 test images in a 5x3 grid; each image is paired with a
# bar chart of its class probabilities (blue = correct, red = incorrect).
n_rows, n_cols = 5, 3
plt.figure(figsize=(4 * n_cols, 2 * n_rows))
for idx in range(n_rows * n_cols):
    plt.subplot(n_rows, 2 * n_cols, 2 * idx + 1)
    plot_image(idx, predict, y_test, X_test)
    plt.subplot(n_rows, 2 * n_cols, 2 * idx + 2)
    plot_value_array(idx, predict, y_test)
plt.show()
import seaborn as sns

# 100x100 confusion matrix of true vs predicted classes for improved model 2.
cm = metrics.confusion_matrix(data['Class'], pred_labels)
plt.figure(figsize=(16, 16))
sns.heatmap(
    cm,
    annot=True,
    fmt="d",
    cmap='viridis',
    xticklabels=class_names,
    yticklabels=class_names,
    annot_kws={'size': 13},
)
plt.xlabel("Predicted Label", fontsize=13)
plt.ylabel("True Label", fontsize=13)
plt.show()
# Class-probability predictions of the tuned model on the test set.
predict= model.predict(X_test)
313/313 [==============================] - 3s 8ms/step
# Report the tuned model's [test loss, test accuracy].
eval_result = model.evaluate(X_test, y_test)
print("[test loss, test accuracy]:", eval_result)
313/313 [==============================] - 3s 10ms/step - loss: 1.4150 - acc: 0.6562 [test loss, test accuracy]: [1.4150466918945312, 0.6561999917030334]
# Hard labels for the tuned model's predictions.
pred_labels=np.argmax(predict, axis=1)
import sklearn.metrics as metrics
# Per-class report for the tuned model, for comparison with the untuned one.
print(metrics.classification_report(data['Class'], pred_labels, target_names=class_names))
precision recall f1-score support
apple 0.97 0.77 0.86 100
aquarium_fish 0.93 0.78 0.85 100
baby 0.68 0.50 0.58 100
bear 0.67 0.42 0.52 100
beaver 0.50 0.57 0.53 100
bed 0.75 0.59 0.66 100
bee 0.69 0.79 0.74 100
beetle 0.79 0.64 0.71 100
bicycle 0.87 0.80 0.83 100
bottle 0.49 0.82 0.61 100
bowl 0.63 0.44 0.52 100
boy 0.45 0.32 0.37 100
bridge 0.68 0.79 0.73 100
bus 0.55 0.65 0.60 100
butterfly 0.63 0.72 0.67 100
camel 0.57 0.71 0.63 100
can 0.50 0.78 0.61 100
castle 0.79 0.78 0.78 100
caterpillar 0.47 0.73 0.57 100
cattle 0.56 0.64 0.60 100
chair 0.88 0.80 0.84 100
chimpanzee 0.88 0.76 0.82 100
clock 0.61 0.73 0.66 100
cloud 0.82 0.79 0.81 100
cockroach 0.62 0.88 0.73 100
couch 0.59 0.59 0.59 100
crab 0.52 0.64 0.57 100
crocodile 0.53 0.48 0.50 100
cup 0.77 0.79 0.78 100
dinosaur 0.75 0.63 0.68 100
dolphin 0.62 0.60 0.61 100
elephant 0.87 0.41 0.56 100
flatfish 0.51 0.72 0.60 100
forest 0.59 0.64 0.62 100
fox 0.86 0.62 0.72 100
girl 0.35 0.62 0.45 100
hamster 0.91 0.50 0.65 100
house 0.74 0.62 0.67 100
kangaroo 0.45 0.65 0.53 100
computer_keyboard 0.69 0.82 0.75 100
lamp 0.63 0.59 0.61 100
lawn_mower 0.93 0.85 0.89 100
leopard 0.61 0.73 0.66 100
lion 0.80 0.72 0.76 100
lizard 0.35 0.47 0.40 100
lobster 0.65 0.53 0.58 100
man 0.57 0.47 0.52 100
maple_tree 0.62 0.71 0.66 100
motorcycle 0.83 0.90 0.87 100
mountain 0.81 0.79 0.80 100
mouse 0.36 0.70 0.48 100
mushroom 0.79 0.61 0.69 100
oak_tree 0.60 0.58 0.59 100
orange 0.88 0.83 0.86 100
orchid 0.91 0.68 0.78 100
otter 0.33 0.46 0.38 100
palm_tree 0.99 0.78 0.87 100
pear 0.72 0.70 0.71 100
pickup_truck 0.93 0.64 0.76 100
pine_tree 0.67 0.65 0.66 100
plain 0.71 0.87 0.78 100
plate 0.78 0.54 0.64 100
poppy 0.90 0.46 0.61 100
porcupine 0.83 0.63 0.72 100
possum 0.66 0.33 0.44 100
rabbit 0.50 0.48 0.49 100
raccoon 0.87 0.62 0.73 100
ray 0.58 0.52 0.55 100
road 0.95 0.87 0.91 100
rocket 0.72 0.76 0.74 100
rose 0.78 0.61 0.69 100
sea 0.82 0.58 0.68 100
seal 0.39 0.48 0.43 100
shark 0.69 0.35 0.46 100
shrew 0.44 0.54 0.48 100
skunk 0.91 0.83 0.87 100
skyscraper 0.95 0.80 0.87 100
snail 0.67 0.60 0.63 100
snake 0.61 0.59 0.60 100
spider 0.55 0.78 0.65 100
squirrel 0.71 0.42 0.53 100
streetcar 0.63 0.79 0.70 100
sunflower 0.89 0.86 0.87 100
sweet_pepper 0.76 0.62 0.68 100
table 0.55 0.71 0.62 100
tank 0.80 0.74 0.77 100
telephone 0.55 0.74 0.63 100
television 0.88 0.61 0.72 100
tiger 0.79 0.76 0.78 100
tractor 0.85 0.68 0.76 100
train 0.44 0.86 0.58 100
trout 0.79 0.71 0.75 100
tulip 0.52 0.72 0.60 100
turtle 0.49 0.56 0.52 100
wardrobe 0.74 0.88 0.80 100
whale 0.72 0.65 0.68 100
willow_tree 0.66 0.49 0.56 100
wolf 0.74 0.67 0.71 100
woman 0.48 0.40 0.43 100
worm 0.86 0.59 0.70 100
accuracy 0.66 10000
macro avg 0.69 0.66 0.66 10000
weighted avg 0.69 0.66 0.66 10000
# Persist the tuned model's predicted labels to a text file. The context
# manager guarantees the handle is closed even if the write raises, and
# threshold=np.inf stops np.array2string from eliding the middle of the
# 10,000-element array with "..." (the default summarises arrays whose size
# exceeds 1000 elements, which silently corrupted the saved output).
with open('predictions', 'w') as out_file:
    out_file.write(np.array2string(pred_labels, threshold=np.inf))
import seaborn as sns

# Confusion-matrix heatmap for the tuned model's predictions.
conf_mat = metrics.confusion_matrix(data['Class'], pred_labels)
plt.figure(figsize=(16, 16))
sns.heatmap(conf_mat, annot=True, fmt="d", cmap='viridis',
            xticklabels=class_names, yticklabels=class_names,
            annot_kws={'size': 13})
plt.xlabel("Predicted Label", fontsize=13)
plt.ylabel("True Label", fontsize=13)
plt.show()
# First 15 test images under the tuned model: image plus probability bars,
# with blue labels for correct predictions and red for incorrect ones.
grid_rows = 5
grid_cols = 3
plt.figure(figsize=(2 * 2 * grid_cols, 2 * grid_rows))
for sample in range(grid_rows * grid_cols):
    plt.subplot(grid_rows, 2 * grid_cols, 2 * sample + 1)
    plot_image(sample, predict, y_test, X_test)
    plt.subplot(grid_rows, 2 * grid_cols, 2 * sample + 2)
    plot_value_array(sample, predict, y_test)
plt.show()
# Save the full improved model 1 (architecture + weights + optimizer state)
# to an HDF5 file; the .h5 suffix selects the HDF5 format. model.save() is
# the method form of tf.keras.models.save_model with the same defaults.
model_improved1.save("model_improved1.h5", overwrite=True, include_optimizer=True)
# Save the full improved model 2 (architecture + weights + optimizer state)
# as HDF5, via the method form of tf.keras.models.save_model.
model_improved2.save("model_improved2.h5", overwrite=True, include_optimizer=True)
# Save the full tuned model (architecture + weights + optimizer state)
# as HDF5, via the method form of tf.keras.models.save_model.
model.save("tuned_model.h5", overwrite=True, include_optimizer=True)